summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.github/workflows/bootstrap.yml33
-rw-r--r--.github/workflows/build-containers.yml6
-rw-r--r--.github/workflows/unit_tests.yaml83
-rw-r--r--CITATION.cff58
-rw-r--r--COPYRIGHT12
-rw-r--r--README.md3
-rwxr-xr-xbin/spack6
-rw-r--r--lib/spack/docs/analyze.rst2
-rw-r--r--lib/spack/docs/build_systems/autotoolspackage.rst18
-rw-r--r--lib/spack/docs/build_systems/cmakepackage.rst14
-rw-r--r--lib/spack/docs/build_systems/pythonpackage.rst13
-rw-r--r--lib/spack/docs/conf.py1
-rw-r--r--lib/spack/docs/contribution_guide.rst2
-rw-r--r--lib/spack/docs/developer_guide.rst11
-rw-r--r--lib/spack/docs/module_file_support.rst144
-rw-r--r--lib/spack/docs/tables/system_prerequisites.csv2
-rwxr-xr-xlib/spack/env/cc2
l---------lib/spack/env/oneapi/dpcpp1
-rw-r--r--lib/spack/external/__init__.py54
-rw-r--r--lib/spack/external/attr/LICENSE21
-rw-r--r--lib/spack/external/attr/__init__.py78
-rw-r--r--lib/spack/external/attr/_cmp.py152
-rw-r--r--lib/spack/external/attr/_compat.py242
-rw-r--r--lib/spack/external/attr/_config.py23
-rw-r--r--lib/spack/external/attr/_funcs.py395
-rw-r--r--lib/spack/external/attr/_make.py3052
-rw-r--r--lib/spack/external/attr/_next_gen.py158
-rw-r--r--lib/spack/external/attr/_version_info.py85
-rw-r--r--lib/spack/external/attr/converters.py111
-rw-r--r--lib/spack/external/attr/exceptions.py92
-rw-r--r--lib/spack/external/attr/filters.py52
-rw-r--r--lib/spack/external/attr/setters.py77
-rw-r--r--lib/spack/external/attr/validators.py379
-rw-r--r--lib/spack/external/ctest_log_parser.py4
-rw-r--r--lib/spack/external/distro.py716
-rw-r--r--lib/spack/external/functools_backport.py47
-rw-r--r--lib/spack/external/jinja2/LICENSE.rst28
-rw-r--r--lib/spack/external/jinja2/__init__.py123
-rw-r--r--lib/spack/external/jinja2/_compat.py67
-rw-r--r--lib/spack/external/jinja2/_identifier.py6
-rw-r--r--lib/spack/external/jinja2/asyncfilters.py70
-rw-r--r--lib/spack/external/jinja2/asyncsupport.py186
-rw-r--r--lib/spack/external/jinja2/bccache.py142
-rw-r--r--lib/spack/external/jinja2/compiler.py1242
-rw-r--r--lib/spack/external/jinja2/constants.py15
-rw-r--r--lib/spack/external/jinja2/debug.py522
-rw-r--r--lib/spack/external/jinja2/defaults.py68
-rw-r--r--lib/spack/external/jinja2/environment.py592
-rw-r--r--lib/spack/external/jinja2/exceptions.py73
-rw-r--r--lib/spack/external/jinja2/ext.py387
-rw-r--r--lib/spack/external/jinja2/filters.py716
-rw-r--r--lib/spack/external/jinja2/idtracking.py58
-rw-r--r--lib/spack/external/jinja2/lexer.py775
-rw-r--r--lib/spack/external/jinja2/loaders.py135
-rw-r--r--lib/spack/external/jinja2/meta.py33
-rw-r--r--lib/spack/external/jinja2/nativetypes.py248
-rw-r--r--lib/spack/external/jinja2/nodes.py427
-rw-r--r--lib/spack/external/jinja2/optimizer.py60
-rw-r--r--lib/spack/external/jinja2/parser.py568
-rw-r--r--lib/spack/external/jinja2/runtime.py704
-rw-r--r--lib/spack/external/jinja2/sandbox.py239
-rw-r--r--lib/spack/external/jinja2/tests.py145
-rw-r--r--lib/spack/external/jinja2/utils.py410
-rw-r--r--lib/spack/external/jinja2/visitor.py14
-rw-r--r--lib/spack/external/jsonschema/README.rst104
-rw-r--r--lib/spack/external/jsonschema/__init__.py31
-rw-r--r--lib/spack/external/jsonschema/_format.py287
-rw-r--r--lib/spack/external/jsonschema/_legacy_validators.py141
-rw-r--r--lib/spack/external/jsonschema/_types.py188
-rw-r--r--lib/spack/external/jsonschema/_utils.py29
-rw-r--r--lib/spack/external/jsonschema/_validators.py279
-rw-r--r--lib/spack/external/jsonschema/cli.py36
-rw-r--r--lib/spack/external/jsonschema/compat.py46
-rw-r--r--lib/spack/external/jsonschema/exceptions.py164
-rw-r--r--lib/spack/external/jsonschema/schemas/draft3.json4
-rw-r--r--lib/spack/external/jsonschema/schemas/draft4.json7
-rw-r--r--lib/spack/external/jsonschema/schemas/draft6.json153
-rw-r--r--lib/spack/external/jsonschema/schemas/draft7.json166
-rw-r--r--lib/spack/external/jsonschema/tests/__init__.py0
-rw-r--r--lib/spack/external/jsonschema/tests/compat.py15
-rw-r--r--lib/spack/external/jsonschema/tests/test_cli.py110
-rw-r--r--lib/spack/external/jsonschema/tests/test_exceptions.py382
-rw-r--r--lib/spack/external/jsonschema/tests/test_format.py63
-rw-r--r--lib/spack/external/jsonschema/tests/test_jsonschema_test_suite.py290
-rw-r--r--lib/spack/external/jsonschema/tests/test_validators.py786
-rw-r--r--lib/spack/external/jsonschema/validators.py891
-rw-r--r--lib/spack/external/markupsafe/AUTHORS13
-rw-r--r--lib/spack/external/markupsafe/LICENSE33
-rw-r--r--lib/spack/external/markupsafe/LICENSE.rst28
-rw-r--r--lib/spack/external/markupsafe/README.rst138
-rw-r--r--lib/spack/external/markupsafe/__init__.py273
-rw-r--r--lib/spack/external/markupsafe/_compat.py23
-rw-r--r--lib/spack/external/markupsafe/_constants.py517
-rw-r--r--lib/spack/external/markupsafe/_native.py67
-rw-r--r--lib/spack/external/ordereddict_backport.py22
-rw-r--r--lib/spack/external/py2/functools32/LICENSE289
-rw-r--r--lib/spack/external/py2/functools32/__init__.py1
-rw-r--r--lib/spack/external/py2/functools32/_dummy_thread32.py158
-rw-r--r--lib/spack/external/py2/functools32/functools32.py423
-rw-r--r--lib/spack/external/py2/functools32/reprlib32.py157
-rw-r--r--lib/spack/external/py26/ordereddict.py127
-rw-r--r--lib/spack/external/pyrsistent/LICENSE22
-rw-r--r--lib/spack/external/pyrsistent/__init__.py6
-rw-r--r--lib/spack/external/pyrsistent/_compat.py31
-rw-r--r--lib/spack/external/pyrsistent/_pmap.py460
-rw-r--r--lib/spack/external/pyrsistent/_pvector.py713
-rw-r--r--lib/spack/external/pyrsistent/_transformations.py143
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/LICENSE (renamed from lib/spack/external/_pytest/LICENSE)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/__init__.py (renamed from lib/spack/external/_pytest/__init__.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/_argcomplete.py (renamed from lib/spack/external/_pytest/_argcomplete.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/_code/__init__.py (renamed from lib/spack/external/_pytest/_code/__init__.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/_code/_py2traceback.py (renamed from lib/spack/external/_pytest/_code/_py2traceback.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/_code/code.py (renamed from lib/spack/external/_pytest/_code/code.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/_code/source.py (renamed from lib/spack/external/_pytest/_code/source.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/_pluggy.py (renamed from lib/spack/external/_pytest/_pluggy.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/_version.py (renamed from lib/spack/external/_pytest/_version.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/assertion/__init__.py (renamed from lib/spack/external/_pytest/assertion/__init__.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/assertion/rewrite.py (renamed from lib/spack/external/_pytest/assertion/rewrite.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/assertion/truncate.py (renamed from lib/spack/external/_pytest/assertion/truncate.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/assertion/util.py (renamed from lib/spack/external/_pytest/assertion/util.py)0
-rwxr-xr-xlib/spack/external/pytest-fallback/_pytest/cacheprovider.py (renamed from lib/spack/external/_pytest/cacheprovider.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/capture.py (renamed from lib/spack/external/_pytest/capture.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/compat.py (renamed from lib/spack/external/_pytest/compat.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/config.py (renamed from lib/spack/external/_pytest/config.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/debugging.py (renamed from lib/spack/external/_pytest/debugging.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/deprecated.py (renamed from lib/spack/external/_pytest/deprecated.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/doctest.py (renamed from lib/spack/external/_pytest/doctest.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/fixtures.py (renamed from lib/spack/external/_pytest/fixtures.py)8
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/freeze_support.py (renamed from lib/spack/external/_pytest/freeze_support.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/helpconfig.py (renamed from lib/spack/external/_pytest/helpconfig.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/hookspec.py (renamed from lib/spack/external/_pytest/hookspec.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/junitxml.py (renamed from lib/spack/external/_pytest/junitxml.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/main.py (renamed from lib/spack/external/_pytest/main.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/mark.py (renamed from lib/spack/external/_pytest/mark.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/monkeypatch.py (renamed from lib/spack/external/_pytest/monkeypatch.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/nodes.py (renamed from lib/spack/external/_pytest/nodes.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/nose.py (renamed from lib/spack/external/_pytest/nose.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/outcomes.py (renamed from lib/spack/external/_pytest/outcomes.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/pastebin.py (renamed from lib/spack/external/_pytest/pastebin.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/pytester.py (renamed from lib/spack/external/_pytest/pytester.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/python.py (renamed from lib/spack/external/_pytest/python.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/python_api.py (renamed from lib/spack/external/_pytest/python_api.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/recwarn.py (renamed from lib/spack/external/_pytest/recwarn.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/resultlog.py (renamed from lib/spack/external/_pytest/resultlog.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/runner.py (renamed from lib/spack/external/_pytest/runner.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/setuponly.py (renamed from lib/spack/external/_pytest/setuponly.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/setupplan.py (renamed from lib/spack/external/_pytest/setupplan.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/skipping.py (renamed from lib/spack/external/_pytest/skipping.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/terminal.py (renamed from lib/spack/external/_pytest/terminal.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/tmpdir.py (renamed from lib/spack/external/_pytest/tmpdir.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/unittest.py (renamed from lib/spack/external/_pytest/unittest.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/vendored_packages/README.md (renamed from lib/spack/external/_pytest/vendored_packages/README.md)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/vendored_packages/__init__.py (renamed from lib/spack/external/_pytest/vendored_packages/__init__.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst (renamed from lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER (renamed from lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt (renamed from lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA (renamed from lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD (renamed from lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL (renamed from lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json (renamed from lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt (renamed from lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy.py (renamed from lib/spack/external/_pytest/vendored_packages/pluggy.py)0
-rw-r--r--lib/spack/external/pytest-fallback/_pytest/warnings.py (renamed from lib/spack/external/_pytest/warnings.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/__init__.py (renamed from lib/spack/external/py/__init__.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/__metainfo.py (renamed from lib/spack/external/py/__metainfo.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_apipkg.py (renamed from lib/spack/external/py/_apipkg.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_builtin.py (renamed from lib/spack/external/py/_builtin.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_code/__init__.py (renamed from lib/spack/external/py/_code/__init__.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_code/_assertionnew.py (renamed from lib/spack/external/py/_code/_assertionnew.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_code/_assertionold.py (renamed from lib/spack/external/py/_code/_assertionold.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_code/_py2traceback.py (renamed from lib/spack/external/py/_code/_py2traceback.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_code/assertion.py (renamed from lib/spack/external/py/_code/assertion.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_code/code.py (renamed from lib/spack/external/py/_code/code.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_code/source.py (renamed from lib/spack/external/py/_code/source.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_error.py (renamed from lib/spack/external/py/_error.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_iniconfig.py (renamed from lib/spack/external/py/_iniconfig.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_io/__init__.py (renamed from lib/spack/external/py/_io/__init__.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_io/capture.py (renamed from lib/spack/external/py/_io/capture.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_io/saferepr.py (renamed from lib/spack/external/py/_io/saferepr.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_io/terminalwriter.py (renamed from lib/spack/external/py/_io/terminalwriter.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_log/__init__.py (renamed from lib/spack/external/py/_log/__init__.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_log/log.py (renamed from lib/spack/external/py/_log/log.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_log/warning.py (renamed from lib/spack/external/py/_log/warning.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_path/__init__.py (renamed from lib/spack/external/py/_path/__init__.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_path/cacheutil.py (renamed from lib/spack/external/py/_path/cacheutil.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_path/common.py (renamed from lib/spack/external/py/_path/common.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_path/local.py (renamed from lib/spack/external/py/_path/local.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_path/svnurl.py (renamed from lib/spack/external/py/_path/svnurl.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_path/svnwc.py (renamed from lib/spack/external/py/_path/svnwc.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_process/__init__.py (renamed from lib/spack/external/py/_process/__init__.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_process/cmdexec.py (renamed from lib/spack/external/py/_process/cmdexec.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_process/forkedfunc.py (renamed from lib/spack/external/py/_process/forkedfunc.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_process/killproc.py (renamed from lib/spack/external/py/_process/killproc.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_std.py (renamed from lib/spack/external/py/_std.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/_xmlgen.py (renamed from lib/spack/external/py/_xmlgen.py)0
-rw-r--r--lib/spack/external/pytest-fallback/py/test.py (renamed from lib/spack/external/py/test.py)0
-rw-r--r--lib/spack/external/pytest-fallback/pytest.py (renamed from lib/spack/external/pytest.py)0
-rw-r--r--lib/spack/external/six.py147
-rw-r--r--lib/spack/llnl/util/multiproc.py2
-rw-r--r--lib/spack/llnl/util/tty/log.py2
-rw-r--r--lib/spack/spack/analyzers/libabigail.py12
-rw-r--r--lib/spack/spack/binary_distribution.py211
-rw-r--r--lib/spack/spack/bootstrap.py453
-rw-r--r--lib/spack/spack/build_environment.py7
-rw-r--r--lib/spack/spack/build_systems/autotools.py6
-rw-r--r--lib/spack/spack/build_systems/cmake.py7
-rw-r--r--lib/spack/spack/build_systems/cuda.py3
-rw-r--r--lib/spack/spack/build_systems/intel.py27
-rw-r--r--lib/spack/spack/build_systems/oneapi.py41
-rw-r--r--lib/spack/spack/build_systems/python.py3
-rw-r--r--lib/spack/spack/build_systems/rocm.py5
-rw-r--r--lib/spack/spack/build_systems/sip.py3
-rw-r--r--lib/spack/spack/ci.py49
-rw-r--r--lib/spack/spack/cmd/analyze.py1
-rw-r--r--lib/spack/spack/cmd/bootstrap.py43
-rw-r--r--lib/spack/spack/cmd/buildcache.py513
-rw-r--r--lib/spack/spack/cmd/checksum.py8
-rw-r--r--lib/spack/spack/cmd/ci.py34
-rw-r--r--lib/spack/spack/cmd/common/arguments.py19
-rw-r--r--lib/spack/spack/cmd/containerize.py1
-rw-r--r--lib/spack/spack/cmd/create.py6
-rw-r--r--lib/spack/spack/cmd/dev_build.py4
-rw-r--r--lib/spack/spack/cmd/flake8.py25
-rw-r--r--lib/spack/spack/cmd/install.py6
-rw-r--r--lib/spack/spack/cmd/license.py3
-rw-r--r--lib/spack/spack/cmd/mirror.py45
-rw-r--r--lib/spack/spack/cmd/monitor.py1
-rw-r--r--lib/spack/spack/cmd/style.py51
-rw-r--r--lib/spack/spack/cmd/tutorial.py4
-rw-r--r--lib/spack/spack/cmd/unit_test.py86
-rw-r--r--lib/spack/spack/compilers/dpcpp.py29
-rw-r--r--lib/spack/spack/compilers/oneapi.py10
-rw-r--r--lib/spack/spack/concretize.py30
-rw-r--r--lib/spack/spack/config.py3
-rw-r--r--lib/spack/spack/container/writers/__init__.py5
-rw-r--r--lib/spack/spack/detection/path.py12
-rw-r--r--lib/spack/spack/directives.py3
-rw-r--r--lib/spack/spack/environment/environment.py113
-rw-r--r--lib/spack/spack/extensions.py5
-rw-r--r--lib/spack/spack/fetch_strategy.py8
-rw-r--r--lib/spack/spack/filesystem_view.py5
-rw-r--r--lib/spack/spack/hooks/__init__.py1
-rw-r--r--lib/spack/spack/hooks/monitor.py11
-rw-r--r--lib/spack/spack/install_test.py5
-rw-r--r--lib/spack/spack/installer.py6
-rw-r--r--lib/spack/spack/mirror.py136
-rw-r--r--lib/spack/spack/monitor.py59
-rw-r--r--lib/spack/spack/operating_systems/linux_distro.py6
-rw-r--r--lib/spack/spack/package.py3
-rw-r--r--lib/spack/spack/pkgkit.py6
-rw-r--r--lib/spack/spack/relocate.py40
-rw-r--r--lib/spack/spack/reporters/cdash.py6
-rw-r--r--lib/spack/spack/s3_handler.py3
-rw-r--r--lib/spack/spack/schema/bootstrap.py2
-rw-r--r--lib/spack/spack/schema/buildcache_spec.py2
-rw-r--r--lib/spack/spack/schema/cdash.py2
-rw-r--r--lib/spack/spack/schema/compilers.py2
-rw-r--r--lib/spack/spack/schema/config.py2
-rw-r--r--lib/spack/spack/schema/database_index.py2
-rw-r--r--lib/spack/spack/schema/env.py2
-rw-r--r--lib/spack/spack/schema/gitlab_ci.py2
-rw-r--r--lib/spack/spack/schema/merged.py2
-rw-r--r--lib/spack/spack/schema/mirrors.py6
-rw-r--r--lib/spack/spack/schema/modules.py2
-rw-r--r--lib/spack/spack/schema/packages.py2
-rw-r--r--lib/spack/spack/schema/projections.py2
-rw-r--r--lib/spack/spack/schema/repos.py2
-rw-r--r--lib/spack/spack/schema/spec.py2
-rw-r--r--lib/spack/spack/schema/upstreams.py2
-rw-r--r--lib/spack/spack/spec.py11
-rw-r--r--lib/spack/spack/stage.py20
-rw-r--r--lib/spack/spack/store.py74
-rw-r--r--lib/spack/spack/test/bootstrap.py27
-rw-r--r--lib/spack/spack/test/build_distribution.py9
-rw-r--r--lib/spack/spack/test/build_systems.py16
-rw-r--r--lib/spack/spack/test/cc.py2
-rw-r--r--lib/spack/spack/test/cmd/audit.py2
-rw-r--r--lib/spack/spack/test/cmd/build_env.py10
-rw-r--r--lib/spack/spack/test/cmd/checksum.py4
-rw-r--r--lib/spack/spack/test/cmd/ci.py22
-rw-r--r--lib/spack/spack/test/cmd/dev_build.py2
-rw-r--r--lib/spack/spack/test/cmd/mirror.py29
-rw-r--r--lib/spack/spack/test/cmd/style.py15
-rw-r--r--lib/spack/spack/test/cmd/test.py29
-rw-r--r--lib/spack/spack/test/cmd/unit_test.py5
-rw-r--r--lib/spack/spack/test/cmd/url.py11
-rw-r--r--lib/spack/spack/test/compilers/detection.py5
-rw-r--r--lib/spack/spack/test/concretize_preferences.py10
-rw-r--r--lib/spack/spack/test/conftest.py12
-rw-r--r--lib/spack/spack/test/database.py15
-rw-r--r--lib/spack/spack/test/directives.py9
-rw-r--r--lib/spack/spack/test/llnl/util/tty/log.py22
-rw-r--r--lib/spack/spack/test/monitor.py28
-rw-r--r--lib/spack/spack/test/relocate.py52
-rw-r--r--lib/spack/spack/test/stage.py24
-rw-r--r--lib/spack/spack/test/web.py33
-rw-r--r--lib/spack/spack/util/mock_package.py6
-rw-r--r--lib/spack/spack/util/s3.py44
-rw-r--r--lib/spack/spack/util/spack_yaml.py4
-rw-r--r--lib/spack/spack/util/web.py10
-rw-r--r--share/spack/docker/amazonlinux-2.dockerfile1
-rw-r--r--share/spack/docker/ubuntu-1604.dockerfile1
-rw-r--r--share/spack/docker/ubuntu-1804.dockerfile1
-rw-r--r--share/spack/gitlab/cloud_pipelines/stacks/e4s-on-power/spack.yaml31
-rw-r--r--share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml33
-rw-r--r--share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml2
-rw-r--r--share/spack/keys/tutorial.pub60
-rwxr-xr-xshare/spack/qa/run-unit-tests1
-rwxr-xr-xshare/spack/spack-completion.bash33
-rw-r--r--var/spack/repos/builtin.mock/packages/autotools-conditional-variants-test/package.py11
-rw-r--r--var/spack/repos/builtin.mock/packages/cmake-conditional-variants-test/package.py9
-rw-r--r--var/spack/repos/builtin.mock/packages/extends-spec/package.py17
-rw-r--r--var/spack/repos/builtin.mock/packages/preferred-test/package.py12
-rw-r--r--var/spack/repos/builtin/packages/3dtk/package.py2
-rw-r--r--var/spack/repos/builtin/packages/abinit/package.py8
-rw-r--r--var/spack/repos/builtin/packages/acts/package.py67
-rw-r--r--var/spack/repos/builtin/packages/adol-c/package.py22
-rw-r--r--var/spack/repos/builtin/packages/amdblis/package.py39
-rw-r--r--var/spack/repos/builtin/packages/amdfftw/package.py180
-rw-r--r--var/spack/repos/builtin/packages/amdlibflame/package.py34
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/amdlibm/0001-libm-ose-Scripts-cleanup-pyc-files.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/amdlibm/0002-libm-ose-prevent-log-v3.c-from-building.patch0
-rw-r--r--var/spack/repos/builtin/packages/amdlibm/package.py28
-rw-r--r--var/spack/repos/builtin/packages/amdscalapack/package.py24
-rw-r--r--var/spack/repos/builtin/packages/amrex/package.py3
-rw-r--r--var/spack/repos/builtin/packages/ants/package.py1
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/aocc/package.py2
-rw-r--r--var/spack/repos/builtin/packages/aocl-sparse/package.py2
-rw-r--r--var/spack/repos/builtin/packages/aoflagger/package.py2
-rw-r--r--var/spack/repos/builtin/packages/apktool/package.py42
-rw-r--r--var/spack/repos/builtin/packages/arborx/package.py2
-rw-r--r--var/spack/repos/builtin/packages/asciidoc/package.py3
-rw-r--r--var/spack/repos/builtin/packages/assimp/package.py4
-rw-r--r--var/spack/repos/builtin/packages/autodiff/package.py35
-rw-r--r--var/spack/repos/builtin/packages/aws-parallelcluster/package.py6
-rw-r--r--var/spack/repos/builtin/packages/axom/package.py20
-rw-r--r--var/spack/repos/builtin/packages/axom/scr_examples_gtest.patch50
-rw-r--r--var/spack/repos/builtin/packages/bcftools/package.py4
-rw-r--r--var/spack/repos/builtin/packages/berkeley-db/package.py15
-rw-r--r--var/spack/repos/builtin/packages/bohrium/package.py22
-rw-r--r--var/spack/repos/builtin/packages/boost/package.py19
-rw-r--r--var/spack/repos/builtin/packages/bridger/package.py8
-rw-r--r--var/spack/repos/builtin/packages/bufr/package.py22
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/c-blosc/gcc.patch0
-rw-r--r--var/spack/repos/builtin/packages/camellia/package.py2
-rw-r--r--var/spack/repos/builtin/packages/cbtf-argonavis/package.py15
-rw-r--r--var/spack/repos/builtin/packages/cbtf-krell/package.py9
-rw-r--r--var/spack/repos/builtin/packages/cbtf-lanl/package.py12
-rw-r--r--var/spack/repos/builtin/packages/cbtf/package.py5
-rw-r--r--var/spack/repos/builtin/packages/ccache/package.py3
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/cctools/cctools_6.1.1.python.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/cctools/cctools_7.0.18.python.patch0
-rw-r--r--var/spack/repos/builtin/packages/cdo/package.py3
-rw-r--r--var/spack/repos/builtin/packages/cgdb/package.py18
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/charmpp/fj.patch0
-rw-r--r--var/spack/repos/builtin/packages/clhep/package.py1
-rw-r--r--var/spack/repos/builtin/packages/clingo/package.py1
-rw-r--r--var/spack/repos/builtin/packages/cmake/package.py2
-rw-r--r--var/spack/repos/builtin/packages/cni-plugins/package.py26
-rw-r--r--var/spack/repos/builtin/packages/conmon/package.py25
-rw-r--r--var/spack/repos/builtin/packages/cosign/package.py34
-rw-r--r--var/spack/repos/builtin/packages/cp2k/package.py25
-rw-r--r--var/spack/repos/builtin/packages/cpio/package.py10
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/cray-libsci/package.py1
-rw-r--r--var/spack/repos/builtin/packages/cuda/package.py4
-rw-r--r--var/spack/repos/builtin/packages/curl/package.py2
-rw-r--r--var/spack/repos/builtin/packages/cyrus-sasl/package.py1
-rw-r--r--var/spack/repos/builtin/packages/czmq/package.py2
-rw-r--r--var/spack/repos/builtin/packages/dbus/package.py2
-rw-r--r--var/spack/repos/builtin/packages/dd4hep/package.py76
-rw-r--r--var/spack/repos/builtin/packages/dealii/package.py1
-rw-r--r--var/spack/repos/builtin/packages/delly2/package.py46
-rw-r--r--var/spack/repos/builtin/packages/dpcpp/package.py157
-rw-r--r--var/spack/repos/builtin/packages/dsfmt/package.py32
-rw-r--r--var/spack/repos/builtin/packages/dsfmt/targets.patch25
-rw-r--r--var/spack/repos/builtin/packages/dust/package.py51
-rw-r--r--var/spack/repos/builtin/packages/dyninst/package.py4
-rw-r--r--var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py13
-rw-r--r--var/spack/repos/builtin/packages/edm4hep/package.py7
-rw-r--r--var/spack/repos/builtin/packages/elpa/package.py12
-rw-r--r--var/spack/repos/builtin/packages/esmf/package.py1
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/exago/package.py32
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/exaworks/package.py0
-rw-r--r--var/spack/repos/builtin/packages/f3d/package.py17
-rw-r--r--var/spack/repos/builtin/packages/fenics-dolfinx/package.py4
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/ffr/gfortran_format_30.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/ffr/gfortran_format_31.patch0
-rw-r--r--var/spack/repos/builtin/packages/flecsale/package.py4
-rw-r--r--var/spack/repos/builtin/packages/flecsi/package.py4
-rw-r--r--var/spack/repos/builtin/packages/flux-core/package.py2
-rw-r--r--var/spack/repos/builtin/packages/flux-sched/no-valgrind.patch19
-rw-r--r--var/spack/repos/builtin/packages/flux-sched/package.py12
-rw-r--r--var/spack/repos/builtin/packages/fpm/package.py4
-rw-r--r--var/spack/repos/builtin/packages/fraggenescan/package.py7
-rw-r--r--var/spack/repos/builtin/packages/fuse-overlayfs/package.py4
-rw-r--r--var/spack/repos/builtin/packages/fxdiv/package.py21
-rw-r--r--var/spack/repos/builtin/packages/g2/package.py7
-rw-r--r--var/spack/repos/builtin/packages/g2c/package.py5
-rw-r--r--var/spack/repos/builtin/packages/g4emlow/package.py1
-rw-r--r--var/spack/repos/builtin/packages/g4particlexs/package.py1
-rw-r--r--var/spack/repos/builtin/packages/g4tendl/package.py1
-rw-r--r--var/spack/repos/builtin/packages/gcc/package.py2
-rw-r--r--var/spack/repos/builtin/packages/gchp/package.py5
-rw-r--r--var/spack/repos/builtin/packages/gdal/package.py65
-rw-r--r--var/spack/repos/builtin/packages/gdk-pixbuf/package.py23
-rw-r--r--var/spack/repos/builtin/packages/geant4-data/package.py14
-rw-r--r--var/spack/repos/builtin/packages/geant4/package.py35
-rw-r--r--var/spack/repos/builtin/packages/gfsio/package.py7
-rw-r--r--var/spack/repos/builtin/packages/ginkgo/1.4.0_dpcpp_use_old_standard.patch70
-rw-r--r--var/spack/repos/builtin/packages/ginkgo/package.py32
-rw-r--r--var/spack/repos/builtin/packages/gl2ps/package.py1
-rw-r--r--var/spack/repos/builtin/packages/glib/package.py1
-rw-r--r--var/spack/repos/builtin/packages/go/package.py2
-rw-r--r--var/spack/repos/builtin/packages/goshimmer/package.py40
-rw-r--r--var/spack/repos/builtin/packages/gpgme/package.py31
-rw-r--r--var/spack/repos/builtin/packages/gpi-2/package.py17
-rw-r--r--var/spack/repos/builtin/packages/gptune/package.py173
-rw-r--r--var/spack/repos/builtin/packages/gpu-burn/package.py6
-rw-r--r--var/spack/repos/builtin/packages/grnboost/package.py2
-rw-r--r--var/spack/repos/builtin/packages/groff/package.py1
-rw-r--r--var/spack/repos/builtin/packages/gromacs-chain-coordinate/package.py68
-rw-r--r--var/spack/repos/builtin/packages/gromacs-swaxs/gmxDetectCpu-cmake-3.14.patch11
-rw-r--r--var/spack/repos/builtin/packages/gromacs-swaxs/gmxDetectSimd-cmake-3.14.patch11
-rw-r--r--var/spack/repos/builtin/packages/gromacs-swaxs/package.py67
-rw-r--r--var/spack/repos/builtin/packages/grpc/package.py6
-rw-r--r--var/spack/repos/builtin/packages/gtk-doc/package.py16
-rw-r--r--var/spack/repos/builtin/packages/gtkplus/package.py8
-rw-r--r--var/spack/repos/builtin/packages/harfbuzz/package.py3
-rw-r--r--var/spack/repos/builtin/packages/harminv/package.py6
-rw-r--r--var/spack/repos/builtin/packages/hdf-eos5/package.py2
-rw-r--r--var/spack/repos/builtin/packages/hdf5-vol-async/package.py26
-rw-r--r--var/spack/repos/builtin/packages/hdf5-vol-external-passthrough/package.py26
-rw-r--r--var/spack/repos/builtin/packages/hdf5-vol-log/package.py4
-rw-r--r--var/spack/repos/builtin/packages/hdf5/package.py6
-rw-r--r--var/spack/repos/builtin/packages/helib/package.py51
-rw-r--r--var/spack/repos/builtin/packages/hiop/package.py22
-rw-r--r--var/spack/repos/builtin/packages/hipblas/link-clients-blas.patch24
-rw-r--r--var/spack/repos/builtin/packages/hipblas/package.py12
-rw-r--r--var/spack/repos/builtin/packages/hpcg/package.py6
-rw-r--r--var/spack/repos/builtin/packages/hpctoolkit/package.py37
-rw-r--r--var/spack/repos/builtin/packages/hpx/package.py5
-rw-r--r--var/spack/repos/builtin/packages/htslib/package.py1
-rw-r--r--var/spack/repos/builtin/packages/hwloc/package.py8
-rw-r--r--var/spack/repos/builtin/packages/imgui/package.py24
-rw-r--r--var/spack/repos/builtin/packages/intel-daal/package.py2
-rw-r--r--var/spack/repos/builtin/packages/intel-llvm/package.py2
-rw-r--r--var/spack/repos/builtin/packages/intel-mkl/package.py2
-rw-r--r--var/spack/repos/builtin/packages/intel-mpi/package.py5
-rw-r--r--var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py4
-rw-r--r--var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py4
-rw-r--r--var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py10
-rw-r--r--var/spack/repos/builtin/packages/intel-oneapi-dal/package.py4
-rw-r--r--var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py4
-rw-r--r--var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py4
-rw-r--r--var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py4
-rw-r--r--var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py4
-rw-r--r--var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py4
-rw-r--r--var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py40
-rw-r--r--var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py24
-rw-r--r--var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py4
-rw-r--r--var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py4
-rw-r--r--var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py4
-rw-r--r--var/spack/repos/builtin/packages/intel-tbb/package.py2
-rw-r--r--var/spack/repos/builtin/packages/interproscan/package.py1
-rw-r--r--var/spack/repos/builtin/packages/ip/package.py7
-rw-r--r--var/spack/repos/builtin/packages/ip2/package.py7
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/isaac-server/arm.patch0
-rw-r--r--var/spack/repos/builtin/packages/isescan/package.py2
-rw-r--r--var/spack/repos/builtin/packages/jq/builtinc.patch48
-rw-r--r--var/spack/repos/builtin/packages/jq/package.py5
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/julia/armgcc.patch0
-rw-r--r--var/spack/repos/builtin/packages/julia/package.py11
-rw-r--r--var/spack/repos/builtin/packages/kokkos-kernels/package.py7
-rw-r--r--var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py2
-rw-r--r--var/spack/repos/builtin/packages/landsfcutil/package.py9
-rw-r--r--var/spack/repos/builtin/packages/lanl-cmake-modules/package.py17
-rw-r--r--var/spack/repos/builtin/packages/lcio/package.py1
-rw-r--r--var/spack/repos/builtin/packages/legion/package.py10
-rw-r--r--var/spack/repos/builtin/packages/libbeagle/package.py12
-rw-r--r--var/spack/repos/builtin/packages/libbeato/package.py2
-rw-r--r--var/spack/repos/builtin/packages/libblastrampoline/package.py23
-rw-r--r--var/spack/repos/builtin/packages/libcroco/package.py10
-rw-r--r--var/spack/repos/builtin/packages/libctl/package.py9
-rw-r--r--var/spack/repos/builtin/packages/libcxxwrap-julia/package.py4
-rw-r--r--var/spack/repos/builtin/packages/libdrm/package.py4
-rw-r--r--var/spack/repos/builtin/packages/libfabric/package.py1
-rw-r--r--var/spack/repos/builtin/packages/libfuse/package.py17
-rw-r--r--var/spack/repos/builtin/packages/libint/package.py15
-rw-r--r--var/spack/repos/builtin/packages/liblzf/package.py19
-rw-r--r--var/spack/repos/builtin/packages/libmonitor/package.py1
-rw-r--r--var/spack/repos/builtin/packages/librsvg/package.py9
-rw-r--r--var/spack/repos/builtin/packages/libseccomp/package.py10
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/libsharp/1.0.0-arm.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/libsharp/arm.patch0
-rw-r--r--var/spack/repos/builtin/packages/libslirp/package.py19
-rw-r--r--var/spack/repos/builtin/packages/libspatialite/package.py4
-rw-r--r--var/spack/repos/builtin/packages/libssh2/package.py26
-rw-r--r--var/spack/repos/builtin/packages/libtree/package.py50
-rw-r--r--var/spack/repos/builtin/packages/libxc/package.py1
-rw-r--r--var/spack/repos/builtin/packages/libxsmm/package.py3
-rw-r--r--var/spack/repos/builtin/packages/libzmq/package.py3
-rw-r--r--var/spack/repos/builtin/packages/likwid/package.py1
-rw-r--r--var/spack/repos/builtin/packages/llvm-doe/package.py321
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/llvm-openmp/package.py0
-rw-r--r--var/spack/repos/builtin/packages/llvm/llvm4-lld-ELF-Symbols.patch112
-rw-r--r--var/spack/repos/builtin/packages/llvm/llvm5-lld-ELF-Symbols.patch33
-rw-r--r--var/spack/repos/builtin/packages/llvm/llvm5-sanitizer-ustat.patch25
-rw-r--r--var/spack/repos/builtin/packages/llvm/missing-includes.patch23
-rw-r--r--var/spack/repos/builtin/packages/llvm/no_cyclades.patch81
-rw-r--r--var/spack/repos/builtin/packages/llvm/no_cyclades9.patch42
-rw-r--r--var/spack/repos/builtin/packages/llvm/package.py138
-rw-r--r--var/spack/repos/builtin/packages/llvm/sanitizer-ipc_perm_mode.patch9
-rw-r--r--var/spack/repos/builtin/packages/llvm/xray_buffer_queue-cstddef.patch5
-rw-r--r--var/spack/repos/builtin/packages/lmod/package.py1
-rw-r--r--var/spack/repos/builtin/packages/lua/package.py8
-rw-r--r--var/spack/repos/builtin/packages/lvm2/package.py3
-rw-r--r--var/spack/repos/builtin/packages/madgraph5amc/package.py3
-rw-r--r--var/spack/repos/builtin/packages/maker/package.py1
-rw-r--r--var/spack/repos/builtin/packages/mariadb-c-client/package.py1
-rw-r--r--var/spack/repos/builtin/packages/mbedtls/fix-dt-needed-shared-libs.patch75
-rw-r--r--var/spack/repos/builtin/packages/mbedtls/package.py7
-rw-r--r--var/spack/repos/builtin/packages/meep/package.py65
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/megahit/amd.patch0
-rw-r--r--var/spack/repos/builtin/packages/mercury/package.py80
-rw-r--r--var/spack/repos/builtin/packages/mesa/package.py17
-rw-r--r--var/spack/repos/builtin/packages/mesa18/package.py2
-rw-r--r--var/spack/repos/builtin/packages/meshtool/package.py1
-rw-r--r--var/spack/repos/builtin/packages/mfem/mfem-4.3-cusparse-11.4.patch80
-rw-r--r--var/spack/repos/builtin/packages/mfem/package.py2
-rw-r--r--var/spack/repos/builtin/packages/microsocks/package.py31
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/modylas/gcc_format.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/modylas/makefile.patch0
-rw-r--r--var/spack/repos/builtin/packages/mpibind/package.py59
-rw-r--r--var/spack/repos/builtin/packages/mpich/package.py12
-rw-r--r--var/spack/repos/builtin/packages/mpitrampoline/package.py6
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/mt-metis/non_x8664.patch0
-rw-r--r--var/spack/repos/builtin/packages/mujoco/package.py49
-rw-r--r--var/spack/repos/builtin/packages/mumps/package.py7
-rw-r--r--var/spack/repos/builtin/packages/muparserx/package.py14
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/mvapich2-gdr/package.py0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/mvapich2x/package.py0
-rw-r--r--var/spack/repos/builtin/packages/nalu-wind/package.py6
-rw-r--r--var/spack/repos/builtin/packages/nalu/package.py2
-rw-r--r--var/spack/repos/builtin/packages/namd/package.py2
-rw-r--r--var/spack/repos/builtin/packages/nccl/package.py2
-rw-r--r--var/spack/repos/builtin/packages/ncio/package.py6
-rw-r--r--var/spack/repos/builtin/packages/ncl/package.py6
-rw-r--r--var/spack/repos/builtin/packages/ncurses/package.py18
-rw-r--r--var/spack/repos/builtin/packages/nextflow/package.py2
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/nnvm/cmake2.patch0
-rw-r--r--var/spack/repos/builtin/packages/nsimd/package.py1
-rw-r--r--var/spack/repos/builtin/packages/nspr/package.py1
-rw-r--r--var/spack/repos/builtin/packages/nss/package.py9
-rw-r--r--var/spack/repos/builtin/packages/ntl/package.py62
-rw-r--r--var/spack/repos/builtin/packages/nvhpc/package.py4
-rw-r--r--var/spack/repos/builtin/packages/nwchem/package.py18
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/ocaml/fix-duplicate-defs.patch0
-rw-r--r--var/spack/repos/builtin/packages/octave/package.py16
-rw-r--r--var/spack/repos/builtin/packages/oommf/package.py295
-rw-r--r--var/spack/repos/builtin/packages/opa-psm2/package.py1
-rw-r--r--var/spack/repos/builtin/packages/open-iscsi/package.py2
-rw-r--r--var/spack/repos/builtin/packages/open3d/package.py113
-rw-r--r--var/spack/repos/builtin/packages/openblas/package.py18
-rw-r--r--var/spack/repos/builtin/packages/opencarp/package.py5
-rw-r--r--var/spack/repos/builtin/packages/opencascade/package.py2
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/opencv/opencv3.2_cmake.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/opencv/opencv3.2_ffmpeg.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/opencv/opencv3.2_python3.7.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/opencv/opencv3.2_regacyvtk.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/opencv/opencv3.2_vtk.patch0
-rw-r--r--var/spack/repos/builtin/packages/openfoam/package.py17
-rw-r--r--var/spack/repos/builtin/packages/openjpeg/package.py22
-rw-r--r--var/spack/repos/builtin/packages/openldap/package.py55
-rw-r--r--var/spack/repos/builtin/packages/openlibm/package.py33
-rw-r--r--var/spack/repos/builtin/packages/openmpi/package.py11
-rw-r--r--var/spack/repos/builtin/packages/openspeedshop-utils/package.py8
-rw-r--r--var/spack/repos/builtin/packages/openspeedshop/package.py8
-rw-r--r--var/spack/repos/builtin/packages/openturns/package.py59
-rw-r--r--var/spack/repos/builtin/packages/oras/package.py47
-rw-r--r--var/spack/repos/builtin/packages/otf/package.py8
-rw-r--r--var/spack/repos/builtin/packages/pagmo2/package.py39
-rw-r--r--var/spack/repos/builtin/packages/palisade-development/package.py70
-rw-r--r--var/spack/repos/builtin/packages/parallel-netcdf/package.py15
-rw-r--r--var/spack/repos/builtin/packages/parallelio/package.py5
-rw-r--r--var/spack/repos/builtin/packages/paraview/package.py14
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/parsimonator/nox86.patch0
-rw-r--r--var/spack/repos/builtin/packages/patchelf/package.py25
-rw-r--r--var/spack/repos/builtin/packages/pcma/package.py4
-rw-r--r--var/spack/repos/builtin/packages/pcre/package.py3
-rw-r--r--var/spack/repos/builtin/packages/percept/package.py2
-rw-r--r--var/spack/repos/builtin/packages/percona-server/package.py3
-rw-r--r--var/spack/repos/builtin/packages/perl-dbd-mysql/package.py4
-rw-r--r--var/spack/repos/builtin/packages/perl-forks/package.py4
-rw-r--r--var/spack/repos/builtin/packages/perl/package.py16
-rw-r--r--var/spack/repos/builtin/packages/petsc/package.py62
-rw-r--r--var/spack/repos/builtin/packages/pgplot/g77_gcc.conf.patch12
-rw-r--r--var/spack/repos/builtin/packages/pgplot/package.py32
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/php/sbang.patch0
-rw-r--r--var/spack/repos/builtin/packages/phyluce/package.py2
-rw-r--r--var/spack/repos/builtin/packages/pixz/package.py1
-rw-r--r--var/spack/repos/builtin/packages/podman/package.py81
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/poppler/poppler_page_splash.0.90.1.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/poppler/poppler_page_splash.patch0
-rw-r--r--var/spack/repos/builtin/packages/portage/package.py34
-rw-r--r--var/spack/repos/builtin/packages/portcullis/package.py10
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/ppopen-appl-amr-fdm/package.py0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/ppopen-appl-bem-at/duplicate_defs.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/ppopen-appl-dem-util/package.py0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/ppopen-appl-fdm-at/package.py0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/ppopen-appl-fdm/gfortran_iargc.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/ppopen-appl-fdm/package.py0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/ppopen-appl-fdm/unused.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/ppopen-appl-fem/gcc_struct_atomic.patch0
-rw-r--r--var/spack/repos/builtin/packages/py-aiohttp/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-aiosqlite/package.py22
-rw-r--r--var/spack/repos/builtin/packages/py-astroid/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-astropy/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-automat/package.py22
-rw-r--r--var/spack/repos/builtin/packages/py-babel/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-backports-os/package.py19
-rw-r--r--var/spack/repos/builtin/packages/py-boost-histogram/package.py22
-rw-r--r--var/spack/repos/builtin/packages/py-brian2/package.py8
-rw-r--r--var/spack/repos/builtin/packages/py-build/package.py26
-rw-r--r--var/spack/repos/builtin/packages/py-carputils/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-click/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-climate/package.py27
-rw-r--r--var/spack/repos/builtin/packages/py-cmake/package.py7
-rw-r--r--var/spack/repos/builtin/packages/py-cmsml/package.py19
-rw-r--r--var/spack/repos/builtin/packages/py-configspace/package.py23
-rw-r--r--var/spack/repos/builtin/packages/py-constantly/package.py17
-rw-r--r--var/spack/repos/builtin/packages/py-copulas/package.py28
-rw-r--r--var/spack/repos/builtin/packages/py-correctionlib/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-coverage/package.py10
-rw-r--r--var/spack/repos/builtin/packages/py-cryptography/package.py7
-rw-r--r--var/spack/repos/builtin/packages/py-ctgan/package.py29
-rw-r--r--var/spack/repos/builtin/packages/py-cx-oracle/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-cycler/package.py8
-rw-r--r--var/spack/repos/builtin/packages/py-cython/package.py7
-rw-r--r--var/spack/repos/builtin/packages/py-debugpy/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-deepdiff/package.py19
-rw-r--r--var/spack/repos/builtin/packages/py-dh-scikit-optimize/package.py38
-rw-r--r--var/spack/repos/builtin/packages/py-distlib/package.py15
-rw-r--r--var/spack/repos/builtin/packages/py-downhill/package.py19
-rw-r--r--var/spack/repos/builtin/packages/py-envisage/package.py9
-rw-r--r--var/spack/repos/builtin/packages/py-faker/package.py25
-rw-r--r--var/spack/repos/builtin/packages/py-fastjsonschema/package.py15
-rw-r--r--var/spack/repos/builtin/packages/py-filelock/package.py9
-rw-r--r--var/spack/repos/builtin/packages/py-fire/package.py25
-rw-r--r--var/spack/repos/builtin/packages/py-flask/package.py17
-rw-r--r--var/spack/repos/builtin/packages/py-flawfinder/package.py18
-rw-r--r--var/spack/repos/builtin/packages/py-fonttools/package.py22
-rw-r--r--var/spack/repos/builtin/packages/py-formulaic/package.py26
-rw-r--r--var/spack/repos/builtin/packages/py-fs/package.py25
-rw-r--r--var/spack/repos/builtin/packages/py-gast/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-genshi/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-gevent/package.py33
-rw-r--r--var/spack/repos/builtin/packages/py-gin-config/package.py18
-rw-r--r--var/spack/repos/builtin/packages/py-gpy/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-greenlet/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-grpcio-tools/package.py60
-rw-r--r--var/spack/repos/builtin/packages/py-grpcio/package.py17
-rw-r--r--var/spack/repos/builtin/packages/py-h5py/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-h5sh/package.py8
-rw-r--r--var/spack/repos/builtin/packages/py-hatchet/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-hep-ml/package.py23
-rw-r--r--var/spack/repos/builtin/packages/py-hepdata-lib/package.py24
-rw-r--r--var/spack/repos/builtin/packages/py-hist/package.py31
-rw-r--r--var/spack/repos/builtin/packages/py-histbook/package.py18
-rw-r--r--var/spack/repos/builtin/packages/py-histogrammar/package.py21
-rw-r--r--var/spack/repos/builtin/packages/py-histoprint/package.py22
-rw-r--r--var/spack/repos/builtin/packages/py-hpbandster/package.py35
-rw-r--r--var/spack/repos/builtin/packages/py-httpretty/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-humanize/package.py9
-rw-r--r--var/spack/repos/builtin/packages/py-hyperlink/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-idna/package.py7
-rw-r--r--var/spack/repos/builtin/packages/py-imageio/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-imagesize/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-iminuit/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-importlib-metadata/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-incremental/package.py17
-rw-r--r--var/spack/repos/builtin/packages/py-intensity-normalization/package.py27
-rw-r--r--var/spack/repos/builtin/packages/py-interface-meta/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-ipykernel/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-itsdangerous/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-jeepney/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-jinja2/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-jsonpickle/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py21
-rw-r--r--var/spack/repos/builtin/packages/py-keras2onnx/package.py24
-rw-r--r--var/spack/repos/builtin/packages/py-keyring/package.py6
-rw-r--r--var/spack/repos/builtin/packages/py-kornia/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-laspy/package.py19
-rw-r--r--var/spack/repos/builtin/packages/py-law/package.py22
-rw-r--r--var/spack/repos/builtin/packages/py-lazy-object-proxy/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-lhsmdu/package.py23
-rw-r--r--var/spack/repos/builtin/packages/py-liac-arff/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-lizard/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-lockfile/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-lws/package.py25
-rw-r--r--var/spack/repos/builtin/packages/py-lxml/package.py5
-rw-r--r--var/spack/repos/builtin/packages/py-lz4/package.py7
-rw-r--r--var/spack/repos/builtin/packages/py-make/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-mako/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-markdown/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-matplotlib/matplotlibrc.patch11
-rw-r--r--var/spack/repos/builtin/packages/py-matplotlib/package.py49
-rw-r--r--var/spack/repos/builtin/packages/py-mmcv/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-more-itertools/package.py14
-rw-r--r--var/spack/repos/builtin/packages/py-mpld3/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-mplhep-data/package.py19
-rw-r--r--var/spack/repos/builtin/packages/py-mplhep/package.py23
-rw-r--r--var/spack/repos/builtin/packages/py-mpmath/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-multidict/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-mxfold2/package.py26
-rw-r--r--var/spack/repos/builtin/packages/py-mypy/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-nbclient/package.py5
-rw-r--r--var/spack/repos/builtin/packages/py-nbconvert/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-nbdime/package.py30
-rw-r--r--var/spack/repos/builtin/packages/py-nest-asyncio/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-networkx/package.py10
-rw-r--r--var/spack/repos/builtin/packages/py-neurokit2/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-neurolab/package.py18
-rw-r--r--var/spack/repos/builtin/packages/py-ninja/package.py35
-rw-r--r--var/spack/repos/builtin/packages/py-nose/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-notebook/package.py4
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/py-ntplib/package.py0
-rw-r--r--var/spack/repos/builtin/packages/py-numpy/package.py21
-rw-r--r--var/spack/repos/builtin/packages/py-oauthlib/package.py15
-rw-r--r--var/spack/repos/builtin/packages/py-onnx-runtime/cms.patch146
-rw-r--r--var/spack/repos/builtin/packages/py-onnx-runtime/gcc11.patch36
-rw-r--r--var/spack/repos/builtin/packages/py-onnx-runtime/libiconv.patch42
-rw-r--r--var/spack/repos/builtin/packages/py-onnx-runtime/package.py121
-rw-r--r--var/spack/repos/builtin/packages/py-onnx/package.py8
-rw-r--r--var/spack/repos/builtin/packages/py-onnxconverter-common/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-onnxmltools/package.py21
-rw-r--r--var/spack/repos/builtin/packages/py-openpmd-validator/package.py6
-rw-r--r--var/spack/repos/builtin/packages/py-openpmd-viewer/package.py56
-rw-r--r--var/spack/repos/builtin/packages/py-opentuner/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-packaging/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-pandas/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-pandocfilters/package.py6
-rw-r--r--var/spack/repos/builtin/packages/py-parmed/package.py21
-rw-r--r--var/spack/repos/builtin/packages/py-parsimonious/package.py18
-rw-r--r--var/spack/repos/builtin/packages/py-parso/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-pathlib2/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-pbr/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-pep517/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-petsc4py/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-pexpect/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-pickle5/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-picmistandard/package.py18
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/py-pika/package.py0
-rw-r--r--var/spack/repos/builtin/packages/py-pillow/package.py14
-rw-r--r--var/spack/repos/builtin/packages/py-pkgconfig/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-pkginfo/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-plac/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-platformdirs/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-pluggy/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-ply/package.py7
-rw-r--r--var/spack/repos/builtin/packages/py-pmw-patched/package.py14
-rw-r--r--var/spack/repos/builtin/packages/py-pooch/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-prettytable/package.py7
-rw-r--r--var/spack/repos/builtin/packages/py-progressbar2/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-prometheus-client/package.py5
-rw-r--r--var/spack/repos/builtin/packages/py-prompt-toolkit/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-prwlock/package.py17
-rw-r--r--var/spack/repos/builtin/packages/py-py/package.py5
-rw-r--r--var/spack/repos/builtin/packages/py-pyaml/package.py22
-rw-r--r--var/spack/repos/builtin/packages/py-pyasn1-modules/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-pyasn1/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-pybids/package.py8
-rw-r--r--var/spack/repos/builtin/packages/py-pybind11/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-pybrain/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-pycortex/package.py53
-rw-r--r--var/spack/repos/builtin/packages/py-pycuda/package.py5
-rw-r--r--var/spack/repos/builtin/packages/py-pycurl/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-pygdal/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-pygraphviz/package.py21
-rw-r--r--var/spack/repos/builtin/packages/py-pylint/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-pymol/package.py58
-rw-r--r--var/spack/repos/builtin/packages/py-pymongo/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-pynisher/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-pyparsing/package.py12
-rw-r--r--var/spack/repos/builtin/packages/py-pyparsing/setuptools-import.patch20
-rw-r--r--var/spack/repos/builtin/packages/py-pyqt-builder/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-pyro4/package.py85
-rw-r--r--var/spack/repos/builtin/packages/py-pyrsistent/package.py5
-rw-r--r--var/spack/repos/builtin/packages/py-pyscipopt/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-pyspellchecker/package.py18
-rw-r--r--var/spack/repos/builtin/packages/py-pysqlite3/package.py26
-rw-r--r--var/spack/repos/builtin/packages/py-pytest-cov/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-pytest-runner/package.py7
-rw-r--r--var/spack/repos/builtin/packages/py-pytest/package.py10
-rw-r--r--var/spack/repos/builtin/packages/py-python-constraint/package.py15
-rw-r--r--var/spack/repos/builtin/packages/py-python-daemon/package.py6
-rw-r--r--var/spack/repos/builtin/packages/py-python-ldap/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-python-rapidjson/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-pythonqwt/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-pythonsollya/package.py7
-rw-r--r--var/spack/repos/builtin/packages/py-pythran/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-pytools/package.py7
-rw-r--r--var/spack/repos/builtin/packages/py-pytorch-lightning/package.py5
-rw-r--r--var/spack/repos/builtin/packages/py-pyyaml/package.py19
-rw-r--r--var/spack/repos/builtin/packages/py-pyzmq/package.py46
-rw-r--r--var/spack/repos/builtin/packages/py-qiskit-aer/package.py56
-rw-r--r--var/spack/repos/builtin/packages/py-qiskit-terra/package.py34
-rw-r--r--var/spack/repos/builtin/packages/py-qpth/package.py21
-rw-r--r--var/spack/repos/builtin/packages/py-qtconsole/package.py6
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/py-radical-entk/package.py3
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/py-radical-gtod/package.py0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/py-radical-pilot/package.py6
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/py-radical-saga/package.py0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/py-radical-utils/package.py6
-rw-r--r--var/spack/repos/builtin/packages/py-rapidfuzz/package.py19
-rw-r--r--var/spack/repos/builtin/packages/py-rdt/package.py25
-rw-r--r--var/spack/repos/builtin/packages/py-requests-oauthlib/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-requests/package.py9
-rw-r--r--var/spack/repos/builtin/packages/py-retry/package.py21
-rw-r--r--var/spack/repos/builtin/packages/py-retworkx/package.py18
-rw-r--r--var/spack/repos/builtin/packages/py-rich/package.py6
-rw-r--r--var/spack/repos/builtin/packages/py-rsa/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-ruamel-yaml-clib/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-schema/package.py18
-rw-r--r--var/spack/repos/builtin/packages/py-scikit-build/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-scikit-fuzzy/package.py21
-rw-r--r--var/spack/repos/builtin/packages/py-scikit-learn-extra/package.py27
-rw-r--r--var/spack/repos/builtin/packages/py-scikit-optimize/package.py9
-rw-r--r--var/spack/repos/builtin/packages/py-scikit-optimize/space.patch57
-rw-r--r--var/spack/repos/builtin/packages/py-scinum/package.py19
-rw-r--r--var/spack/repos/builtin/packages/py-scipy/package.py13
-rw-r--r--var/spack/repos/builtin/packages/py-selectors34/package.py21
-rw-r--r--var/spack/repos/builtin/packages/py-send2trash/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-serpent/package.py21
-rw-r--r--var/spack/repos/builtin/packages/py-setupmeta/package.py18
-rw-r--r--var/spack/repos/builtin/packages/py-setuptools-cpp/package.py46
-rw-r--r--var/spack/repos/builtin/packages/py-setuptools/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-shellingham/package.py19
-rw-r--r--var/spack/repos/builtin/packages/py-sina/no_orjson.patch15
-rw-r--r--var/spack/repos/builtin/packages/py-sina/package.py44
-rw-r--r--var/spack/repos/builtin/packages/py-singledispatch/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-sip/package.py78
-rw-r--r--var/spack/repos/builtin/packages/py-skl2onnx/package.py23
-rw-r--r--var/spack/repos/builtin/packages/py-slepc4py/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-sphinx-argparse/package.py21
-rw-r--r--var/spack/repos/builtin/packages/py-sphinx-multiversion/package.py21
-rw-r--r--var/spack/repos/builtin/packages/py-sphinx/package.py6
-rw-r--r--var/spack/repos/builtin/packages/py-stevedore/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-symengine/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py37
-rw-r--r--var/spack/repos/builtin/packages/py-tensorflow-metadata/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-terminado/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-tern/package.py22
-rw-r--r--var/spack/repos/builtin/packages/py-testpath/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-theano/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-threadpoolctl/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-tifffile/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-tomlkit/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-torch/package.py6
-rw-r--r--var/spack/repos/builtin/packages/py-torchgeo/package.py32
-rw-r--r--var/spack/repos/builtin/packages/py-torchvision/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-traitlets/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-traits/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-trojanzoo-sphinx-theme/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-tweedledum/package.py25
-rw-r--r--var/spack/repos/builtin/packages/py-twisted/package.py83
-rw-r--r--var/spack/repos/builtin/packages/py-uhi/package.py22
-rw-r--r--var/spack/repos/builtin/packages/py-uproot/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-vector-quantize-pytorch/package.py22
-rw-r--r--var/spack/repos/builtin/packages/py-vector/package.py24
-rw-r--r--var/spack/repos/builtin/packages/py-virtualenv-clone/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-virtualenv/package.py13
-rw-r--r--var/spack/repos/builtin/packages/py-virtualenvwrapper/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-warpx/package.py17
-rw-r--r--var/spack/repos/builtin/packages/py-wcwidth/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-websocket-client/package.py15
-rw-r--r--var/spack/repos/builtin/packages/py-werkzeug/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-wheel/package.py1
-rw-r--r--var/spack/repos/builtin/packages/py-wrapt/package.py9
-rw-r--r--var/spack/repos/builtin/packages/py-wurlitzer/package.py23
-rw-r--r--var/spack/repos/builtin/packages/py-xrootdpyfs/package.py19
-rw-r--r--var/spack/repos/builtin/packages/py-yarl/package.py12
-rw-r--r--var/spack/repos/builtin/packages/py-yq/package.py23
-rw-r--r--var/spack/repos/builtin/packages/py-ytopt-autotune/package.py24
-rw-r--r--var/spack/repos/builtin/packages/py-ytopt-autotune/version.patch10
-rw-r--r--var/spack/repos/builtin/packages/py-ytopt/package.py24
-rw-r--r--var/spack/repos/builtin/packages/py-zope-event/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-zope-interface/package.py1
-rw-r--r--var/spack/repos/builtin/packages/pygmo/package.py35
-rw-r--r--var/spack/repos/builtin/packages/python/package.py10
-rw-r--r--var/spack/repos/builtin/packages/python/tkinter-3.10.patch11
-rw-r--r--var/spack/repos/builtin/packages/qmcpack/package.py26
-rw-r--r--var/spack/repos/builtin/packages/qt/package.py35
-rw-r--r--var/spack/repos/builtin/packages/quantum-espresso/configure_aocc.patch12
-rw-r--r--var/spack/repos/builtin/packages/quantum-espresso/package.py258
-rw-r--r--var/spack/repos/builtin/packages/r-affy/package.py2
-rw-r--r--var/spack/repos/builtin/packages/r-backports/package.py6
-rw-r--r--var/spack/repos/builtin/packages/r-bh/package.py8
-rw-r--r--var/spack/repos/builtin/packages/r-blob/package.py6
-rw-r--r--var/spack/repos/builtin/packages/r-brio/package.py4
-rw-r--r--var/spack/repos/builtin/packages/r-brms/package.py3
-rw-r--r--var/spack/repos/builtin/packages/r-car/package.py3
-rw-r--r--var/spack/repos/builtin/packages/r-cli/package.py1
-rw-r--r--var/spack/repos/builtin/packages/r-colorspace/package.py6
-rw-r--r--var/spack/repos/builtin/packages/r-colourpicker/package.py1
-rw-r--r--var/spack/repos/builtin/packages/r-conquer/package.py5
-rw-r--r--var/spack/repos/builtin/packages/r-cpp11/package.py1
-rw-r--r--var/spack/repos/builtin/packages/r-crayon/package.py1
-rw-r--r--var/spack/repos/builtin/packages/r-crosstalk/package.py6
-rw-r--r--var/spack/repos/builtin/packages/r-dbi/package.py10
-rw-r--r--var/spack/repos/builtin/packages/r-desc/package.py13
-rw-r--r--var/spack/repos/builtin/packages/r-diffobj/package.py4
-rw-r--r--var/spack/repos/builtin/packages/r-dt/package.py5
-rw-r--r--var/spack/repos/builtin/packages/r-emmeans/package.py1
-rw-r--r--var/spack/repos/builtin/packages/r-htmltools/package.py3
-rw-r--r--var/spack/repos/builtin/packages/r-jquerylib/package.py16
-rw-r--r--var/spack/repos/builtin/packages/r-packrat/package.py4
-rw-r--r--var/spack/repos/builtin/packages/r-prettydoc/package.py2
-rw-r--r--var/spack/repos/builtin/packages/r-r6/package.py4
-rw-r--r--var/spack/repos/builtin/packages/r-rcpparmadillo/package.py4
-rw-r--r--var/spack/repos/builtin/packages/r-rio/package.py9
-rw-r--r--var/spack/repos/builtin/packages/r-rmarkdown/package.py2
-rw-r--r--var/spack/repos/builtin/packages/r-rpostgresql/package.py8
-rw-r--r--var/spack/repos/builtin/packages/r-rsconnect/package.py2
-rw-r--r--var/spack/repos/builtin/packages/r-tictoc/package.py11
-rw-r--r--var/spack/repos/builtin/packages/r-tidyverse/package.py81
-rw-r--r--var/spack/repos/builtin/packages/r-v8/package.py12
-rw-r--r--var/spack/repos/builtin/packages/r-viridislite/package.py4
-rw-r--r--var/spack/repos/builtin/packages/r-vroom/package.py1
-rw-r--r--var/spack/repos/builtin/packages/racket/package.py67
-rw-r--r--var/spack/repos/builtin/packages/raja/package.py12
-rw-r--r--var/spack/repos/builtin/packages/random123/package.py12
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/rapidjson/arm.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/raxml/nox86.patch0
-rw-r--r--var/spack/repos/builtin/packages/rclone/package.py3
-rw-r--r--var/spack/repos/builtin/packages/reframe/package.py2
-rw-r--r--var/spack/repos/builtin/packages/rhash/package.py49
-rw-r--r--var/spack/repos/builtin/packages/rivet/package.py2
-rw-r--r--var/spack/repos/builtin/packages/rocm-openmp-extras/package.py94
-rw-r--r--var/spack/repos/builtin/packages/rocm-tensile/package.py29
-rw-r--r--var/spack/repos/builtin/packages/rocm-validation-suite/004-remove-git-download-yaml-cpp-use-yaml-cpp-recipe.patch50
-rw-r--r--var/spack/repos/builtin/packages/rocm-validation-suite/package.py6
-rw-r--r--var/spack/repos/builtin/packages/rocprofiler-dev/package.py2
-rw-r--r--var/spack/repos/builtin/packages/rocsolver/link-clients-blas.patch22
-rw-r--r--var/spack/repos/builtin/packages/rocsolver/package.py11
-rw-r--r--var/spack/repos/builtin/packages/roms/package.py3
-rw-r--r--var/spack/repos/builtin/packages/runc/package.py24
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/salmon-tddft/cmakefix.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/salmon-tddft/fjmpi.patch0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/salmon-tddft/package.py0
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/salmon-tddft/v2.0.libxc-5.0.patch0
-rw-r--r--var/spack/repos/builtin/packages/samtools/package.py15
-rw-r--r--var/spack/repos/builtin/packages/scale/fj-own_compiler.patch51
-rw-r--r--var/spack/repos/builtin/packages/scale/package.py84
-rw-r--r--var/spack/repos/builtin/packages/seacas/package.py3
-rw-r--r--var/spack/repos/builtin/packages/seal/package.py30
-rw-r--r--var/spack/repos/builtin/packages/sensei/package.py9
-rw-r--r--var/spack/repos/builtin/packages/sfcio/package.py7
-rw-r--r--var/spack/repos/builtin/packages/sherpa/package.py129
-rw-r--r--var/spack/repos/builtin/packages/sigio/package.py7
-rw-r--r--var/spack/repos/builtin/packages/silo/H5EPR_SEMI_COLON.patch58
-rw-r--r--var/spack/repos/builtin/packages/silo/package.py5
-rw-r--r--var/spack/repos/builtin/packages/simgrid/package.py4
-rw-r--r--var/spack/repos/builtin/packages/singularity/package.py3
-rw-r--r--var/spack/repos/builtin/packages/singularityce/package.py7
-rw-r--r--var/spack/repos/builtin/packages/sirius/package.py1
-rw-r--r--var/spack/repos/builtin/packages/slepc/package.py3
-rw-r--r--var/spack/repos/builtin/packages/slirp4netns/package.py25
-rw-r--r--var/spack/repos/builtin/packages/soapdenovo2/package.py8
-rw-r--r--var/spack/repos/builtin/packages/sollve/package.py1
-rw-r--r--var/spack/repos/builtin/packages/sp/package.py7
-rw-r--r--var/spack/repos/builtin/packages/spack/package.py18
-rw-r--r--var/spack/repos/builtin/packages/star-ccm-plus/package.py21
-rw-r--r--var/spack/repos/builtin/packages/structure/package.py6
-rw-r--r--var/spack/repos/builtin/packages/suite-sparse/package.py96
-rw-r--r--var/spack/repos/builtin/packages/sundials/package.py3
-rw-r--r--var/spack/repos/builtin/packages/superlu-dist/package.py19
-rw-r--r--var/spack/repos/builtin/packages/superlu-dist/superlu-cray-ftn-case.patch15
-rw-r--r--var/spack/repos/builtin/packages/symengine/package.py1
-rw-r--r--var/spack/repos/builtin/packages/tangram/package.py20
-rw-r--r--var/spack/repos/builtin/packages/tau/package.py4
-rw-r--r--var/spack/repos/builtin/packages/thepeg/package.py2
-rw-r--r--var/spack/repos/builtin/packages/tinygltf/package.py17
-rw-r--r--var/spack/repos/builtin/packages/tinyobjloader/package.py17
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/tiptop/NR_perf_counter_open_aarch64.patch0
-rw-r--r--var/spack/repos/builtin/packages/tree-sitter/package.py23
-rw-r--r--var/spack/repos/builtin/packages/trilinos/package.py24
-rw-r--r--var/spack/repos/builtin/packages/umpire/package.py7
-rw-r--r--var/spack/repos/builtin/packages/utf8cpp/package.py23
-rw-r--r--var/spack/repos/builtin/packages/utf8proc/package.py3
-rw-r--r--var/spack/repos/builtin/packages/vecgeom/package.py1
-rw-r--r--var/spack/repos/builtin/packages/vecmem/package.py48
-rw-r--r--var/spack/repos/builtin/packages/visit/package.py62
-rw-r--r--var/spack/repos/builtin/packages/visit/vtk_compiler_visibility.patch12
-rw-r--r--var/spack/repos/builtin/packages/visit/vtk_rendering_opengl2_x11.patch12
-rw-r--r--var/spack/repos/builtin/packages/visit/vtk_wrapping_python_x11.patch14
-rw-r--r--var/spack/repos/builtin/packages/votca-csg-tutorials/package.py30
-rw-r--r--var/spack/repos/builtin/packages/votca-csg/package.py30
-rw-r--r--var/spack/repos/builtin/packages/votca-csgapps/package.py18
-rw-r--r--var/spack/repos/builtin/packages/votca-ctp/package.py5
-rw-r--r--var/spack/repos/builtin/packages/votca-tools/package.py27
-rw-r--r--var/spack/repos/builtin/packages/votca-xtp/package.py27
-rw-r--r--var/spack/repos/builtin/packages/votca/package.py64
-rw-r--r--var/spack/repos/builtin/packages/vtk-h/package.py5
-rw-r--r--var/spack/repos/builtin/packages/vtk-m/package.py3
-rw-r--r--var/spack/repos/builtin/packages/vtk/internal_findHDF5.patch16
-rw-r--r--var/spack/repos/builtin/packages/vtk/package.py167
-rw-r--r--[-rwxr-xr-x]var/spack/repos/builtin/packages/vvtest/package.py0
-rw-r--r--var/spack/repos/builtin/packages/w3emc/package.py7
-rw-r--r--var/spack/repos/builtin/packages/warpx/2626.patch34
-rw-r--r--var/spack/repos/builtin/packages/warpx/package.py22
-rw-r--r--var/spack/repos/builtin/packages/whizard/package.py1
-rw-r--r--var/spack/repos/builtin/packages/wi4mpi/package.py55
-rw-r--r--var/spack/repos/builtin/packages/wiredtiger/package.py39
-rw-r--r--var/spack/repos/builtin/packages/wonton/package.py14
-rw-r--r--var/spack/repos/builtin/packages/xrootd/no-systemd.patch17
-rw-r--r--var/spack/repos/builtin/packages/xrootd/package.py12
-rw-r--r--var/spack/repos/builtin/packages/xsdk/package.py482
1017 files changed, 28058 insertions, 9913 deletions
diff --git a/.github/workflows/bootstrap.yml b/.github/workflows/bootstrap.yml
index 5b3e237b0c..ec7d9a3898 100644
--- a/.github/workflows/bootstrap.yml
+++ b/.github/workflows/bootstrap.yml
@@ -1,6 +1,8 @@
name: Bootstrapping
on:
+ # This Workflow can be triggered manually
+ workflow_dispatch:
pull_request:
branches:
- develop
@@ -76,13 +78,42 @@ jobs:
spack -d solve zlib
tree ~/.spack/bootstrap/store/
+ ubuntu-clingo-binaries-and-patchelf:
+ runs-on: ubuntu-latest
+ container: "ubuntu:latest"
+ steps:
+ - name: Install dependencies
+ env:
+ DEBIAN_FRONTEND: noninteractive
+ run: |
+ apt-get update -y && apt-get upgrade -y
+ apt-get install -y \
+ bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
+ make patch unzip xz-utils python3 python3-dev tree
+ - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+ - name: Setup repo and non-root user
+ run: |
+ git --version
+ git fetch --unshallow
+ . .github/workflows/setup_git.sh
+ useradd -m spack-test
+ chown -R spack-test .
+ - name: Bootstrap clingo
+ shell: runuser -u spack-test -- bash {0}
+ run: |
+ source share/spack/setup-env.sh
+ spack -d solve zlib
+ tree ~/.spack/bootstrap/store/
+
+
opensuse-clingo-sources:
runs-on: ubuntu-latest
container: "opensuse/leap:latest"
steps:
- name: Install dependencies
run: |
- zypper update -y
+ # Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
+ zypper update -y || zypper update -y
zypper install -y \
bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
make patch unzip which xz python3 python3-devel tree \
diff --git a/.github/workflows/build-containers.yml b/.github/workflows/build-containers.yml
index a55e0daa77..1e996111b8 100644
--- a/.github/workflows/build-containers.yml
+++ b/.github/workflows/build-containers.yml
@@ -12,6 +12,7 @@ on:
- develop
paths:
- '.github/workflows/build-containers.yml'
+ - 'share/spack/docker/*'
# Let's also build & tag Spack containers on releases.
release:
types: [published]
@@ -66,14 +67,15 @@ jobs:
uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # @v1
- name: Log in to GitHub Container Registry
- uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 # @v1
+ uses: docker/login-action@42d299face0c5c43a0487c477f595ac9cf22f1a7 # @v1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Log in to DockerHub
- uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 # @v1
+ if: ${{ github.event_name != 'pull_request' }}
+ uses: docker/login-action@42d299face0c5c43a0487c477f595ac9cf22f1a7 # @v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
diff --git a/.github/workflows/unit_tests.yaml b/.github/workflows/unit_tests.yaml
index 764a9cdcf0..a1cd2bbd42 100644
--- a/.github/workflows/unit_tests.yaml
+++ b/.github/workflows/unit_tests.yaml
@@ -24,9 +24,9 @@ jobs:
pip install --upgrade pip
pip install --upgrade vermin
- name: vermin (Spack's Core)
- run: vermin --backport argparse --violations --backport typing -t=2.6- -t=3.5- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
+ run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.5- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories)
- run: vermin --backport argparse --violations --backport typing -t=2.6- -t=3.5- -vvv var/spack/repos
+ run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.5- -vvv var/spack/repos
# Run style checks on the files that have been changed
style:
runs-on: ubuntu-latest
@@ -97,7 +97,14 @@ jobs:
strategy:
matrix:
python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9]
- concretizer: ['original', 'clingo']
+ concretizer: ['clingo']
+ include:
+ - python-version: 2.7
+ concretizer: original
+ - python-version: 3.6
+ concretizer: original
+ - python-version: 3.9
+ concretizer: original
steps:
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
with:
@@ -114,7 +121,7 @@ jobs:
patchelf cmake bison libbison-dev kcov
- name: Install Python packages
run: |
- pip install --upgrade pip six setuptools codecov coverage[toml]
+ pip install --upgrade pip six setuptools pytest codecov coverage[toml]
# ensure style checks are not skipped in unit tests for python >= 3.6
# note that true/false (i.e., 1/0) are opposite in conditions in python and bash
if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
@@ -173,7 +180,7 @@ jobs:
sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
- name: Install Python packages
run: |
- pip install --upgrade pip six setuptools codecov coverage[toml]
+ pip install --upgrade pip six setuptools pytest codecov coverage[toml]
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
@@ -193,39 +200,6 @@ jobs:
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
flags: shelltests,linux
- # Test for Python2.6 run on Centos 6
- centos6:
- needs: [ validate, style, changes ]
- runs-on: ubuntu-latest
- container: spack/github-actions:centos6
- steps:
- - name: Run unit tests (full test-suite)
- # The CentOS 6 container doesn't run with coverage, but
- # under the same conditions it runs the full test suite
- if: ${{ needs.changes.outputs.with_coverage == 'true' }}
- env:
- HOME: /home/spack-test
- SPACK_TEST_SOLVER: original
- run: |
- whoami && echo $HOME && cd $HOME
- git clone "${{ github.server_url }}/${{ github.repository }}.git" && cd spack
- git fetch origin "${{ github.ref }}:test-branch"
- git checkout test-branch
- . .github/workflows/setup_git.sh
- bin/spack unit-test -x
- - name: Run unit tests (only package tests)
- if: ${{ needs.changes.outputs.with_coverage == 'false' }}
- env:
- HOME: /home/spack-test
- ONLY_PACKAGES: true
- SPACK_TEST_SOLVER: original
- run: |
- whoami && echo $HOME && cd $HOME
- git clone "${{ github.server_url }}/${{ github.repository }}.git" && cd spack
- git fetch origin "${{ github.ref }}:test-branch"
- git checkout test-branch
- . .github/workflows/setup_git.sh
- bin/spack unit-test -x -m "not maybeslow" -k "package_sanity"
# Test RHEL8 UBI with platform Python. This job is run
# only on PRs modifying core Spack
@@ -274,7 +248,7 @@ jobs:
patchelf kcov
- name: Install Python packages
run: |
- pip install --upgrade pip six setuptools codecov coverage[toml] clingo
+ pip install --upgrade pip six setuptools pytest codecov coverage[toml] clingo
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
@@ -317,7 +291,7 @@ jobs:
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools
- pip install --upgrade codecov coverage[toml]
+ pip install --upgrade pytest codecov coverage[toml]
- name: Setup Homebrew packages
run: |
brew install dash fish gcc gnupg2 kcov
@@ -347,3 +321,32 @@ jobs:
with:
files: ./coverage.xml
flags: unittests,macos
+
+ # Run audits on all the packages in the built-in repository
+ package-audits:
+ needs: [ validate, style, changes ]
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+ - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+ with:
+ python-version: 3.9
+ - name: Install Python packages
+ run: |
+ pip install --upgrade pip six setuptools pytest codecov coverage[toml]
+ - name: Package audits (with coverage)
+ if: ${{ needs.changes.outputs.with_coverage == 'true' }}
+ run: |
+ . share/spack/setup-env.sh
+ coverage run $(which spack) audit packages
+ coverage combine
+ coverage xml
+      - name: Package audits (without coverage)
+ if: ${{ needs.changes.outputs.with_coverage == 'false' }}
+ run: |
+ . share/spack/setup-env.sh
+ $(which spack) audit packages
+ - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
+ if: ${{ needs.changes.outputs.with_coverage == 'true' }}
+ with:
+ flags: unittests,linux,audits
diff --git a/CITATION.cff b/CITATION.cff
new file mode 100644
index 0000000000..4ae54a57df
--- /dev/null
+++ b/CITATION.cff
@@ -0,0 +1,58 @@
+# If you are referencing Spack in a publication, please cite the SC'15 paper
+# described here.
+#
+# Here's the raw citation:
+#
+# Todd Gamblin, Matthew P. LeGendre, Michael R. Collette, Gregory L. Lee,
+# Adam Moody, Bronis R. de Supinski, and W. Scott Futral.
+# The Spack Package Manager: Bringing Order to HPC Software Chaos.
+# In Supercomputing 2015 (SC’15), Austin, Texas, November 15-20 2015. LLNL-CONF-669890.
+#
+# Or, in BibTeX:
+#
+# @inproceedings{Gamblin_The_Spack_Package_2015,
+# address = {Austin, Texas, USA},
+# author = {Gamblin, Todd and LeGendre, Matthew and
+# Collette, Michael R. and Lee, Gregory L. and
+# Moody, Adam and de Supinski, Bronis R. and Futral, Scott},
+# doi = {10.1145/2807591.2807623},
+# month = {November 15-20},
+# note = {LLNL-CONF-669890},
+# series = {Supercomputing 2015 (SC’15)},
+# title = {{The Spack Package Manager: Bringing Order to HPC Software Chaos}},
+# url = {https://github.com/spack/spack},
+# year = {2015}
+# }
+#
+# And here's the CITATION.cff format:
+#
+cff-version: 1.2.0
+message: "If you are referencing Spack in a publication, please cite the paper below."
+preferred-citation:
+ type: conference-paper
+ doi: "10.1145/2807591.2807623"
+ url: "https://github.com/spack/spack"
+ authors:
+ - family-names: "Gamblin"
+ given-names: "Todd"
+ - family-names: "LeGendre"
+ given-names: "Matthew"
+ - family-names: "Collette"
+ given-names: "Michael R."
+ - family-names: "Lee"
+ given-names: "Gregory L."
+ - family-names: "Moody"
+ given-names: "Adam"
+ - family-names: "de Supinski"
+ given-names: "Bronis R."
+ - family-names: "Futral"
+ given-names: "Scott"
+ title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
+ conference:
+ name: "Supercomputing 2015 (SC’15)"
+ city: "Austin"
+ region: "Texas"
+ country: "USA"
+ month: November 15-20
+ year: 2015
+ notes: LLNL-CONF-669890
diff --git a/COPYRIGHT b/COPYRIGHT
index 7bc67442d7..c0550caf95 100644
--- a/COPYRIGHT
+++ b/COPYRIGHT
@@ -38,6 +38,10 @@ PackageName: argparse
PackageHomePage: https://pypi.python.org/pypi/argparse
PackageLicenseDeclared: Python-2.0
+PackageName: attrs
+PackageHomePage: https://github.com/python-attrs/attrs
+PackageLicenseDeclared: MIT
+
PackageName: ctest_log_parser
PackageHomePage: https://github.com/Kitware/CMake
PackageLicenseDeclared: BSD-3-Clause
@@ -46,8 +50,8 @@ PackageName: distro
PackageHomePage: https://pypi.python.org/pypi/distro
PackageLicenseDeclared: Apache-2.0
-PackageName: functools
-PackageHomePage: https://github.com/python/cpython/blob/2.7/Lib/functools.py
+PackageName: functools32
+PackageHomePage: https://github.com/MiCHiLU/python-functools32
PackageLicenseDeclared: Python-2.0
PackageName: jinja2
@@ -70,6 +74,10 @@ PackageName: py
PackageHomePage: https://pypi.python.org/pypi/py
PackageLicenseDeclared: MIT
+PackageName: pyrsistent
+PackageHomePage: http://github.com/tobgu/pyrsistent
+PackageLicenseDeclared: MIT
+
PackageName: pytest
PackageHomePage: https://pypi.python.org/pypi/pytest
PackageLicenseDeclared: MIT
diff --git a/README.md b/README.md
index 186c0a1462..e8043672e5 100644
--- a/README.md
+++ b/README.md
@@ -125,6 +125,9 @@ If you are referencing Spack in a publication, please cite the following paper:
[**The Spack Package Manager: Bringing Order to HPC Software Chaos**](https://www.computer.org/csdl/proceedings/sc/2015/3723/00/2807623.pdf).
In *Supercomputing 2015 (SC’15)*, Austin, Texas, November 15-20 2015. LLNL-CONF-669890.
+On GitHub, you can copy this citation in APA or BibTeX format via the "Cite this repository"
+button. Or, see the comments in `CITATION.cff` for the raw BibTeX.
+
License
----------------
diff --git a/bin/spack b/bin/spack
index dffcbd2026..3b4f782d1c 100755
--- a/bin/spack
+++ b/bin/spack
@@ -33,11 +33,11 @@ import sys
min_python3 = (3, 5)
-if sys.version_info[:2] < (2, 6) or (
+if sys.version_info[:2] < (2, 7) or (
sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < min_python3
):
v_info = sys.version_info[:3]
- msg = "Spack requires Python 2.6, 2.7 or %d.%d or higher " % min_python3
+ msg = "Spack requires Python 2.7 or %d.%d or higher " % min_python3
msg += "You are running spack with Python %d.%d.%d." % v_info
sys.exit(msg)
@@ -54,8 +54,6 @@ spack_external_libs = os.path.join(spack_lib_path, "external")
if sys.version_info[:2] <= (2, 7):
sys.path.insert(0, os.path.join(spack_external_libs, "py2"))
-if sys.version_info[:2] == (2, 6):
- sys.path.insert(0, os.path.join(spack_external_libs, "py26"))
sys.path.insert(0, spack_external_libs)
diff --git a/lib/spack/docs/analyze.rst b/lib/spack/docs/analyze.rst
index 38af77cd7f..2df48d1e76 100644
--- a/lib/spack/docs/analyze.rst
+++ b/lib/spack/docs/analyze.rst
@@ -59,7 +59,7 @@ are available:
install_files : install file listing read from install_manifest.json
environment_variables : environment variables parsed from spack-build-env.txt
config_args : config args loaded from spack-configure-args.txt
- abigail : Application Binary Interface (ABI) features for objects
+ libabigail : Application Binary Interface (ABI) features for objects
In the above, the first three are fairly simple - parsing metadata files from
diff --git a/lib/spack/docs/build_systems/autotoolspackage.rst b/lib/spack/docs/build_systems/autotoolspackage.rst
index 71d8d7d866..d62fb08ce8 100644
--- a/lib/spack/docs/build_systems/autotoolspackage.rst
+++ b/lib/spack/docs/build_systems/autotoolspackage.rst
@@ -420,6 +420,24 @@ Or when one variant controls multiple flags:
config_args += self.with_or_without('memchecker', variant='debug_tools')
config_args += self.with_or_without('profiler', variant='debug_tools')
+
+""""""""""""""""""""
+Conditional variants
+""""""""""""""""""""
+
+When a variant is conditional and its condition is not met on the concrete spec, the
+``with_or_without`` and ``enable_or_disable`` methods will simply return an empty list.
+
+For example:
+
+.. code-block:: python
+
+ variant('profiler', when='@2.0:')
+    config_args += self.with_or_without('profiler')
+
+will neither add ``--with-profiler`` nor ``--without-profiler`` when the version is
+below ``2.0``.
+
""""""""""""""""""""
Activation overrides
""""""""""""""""""""
diff --git a/lib/spack/docs/build_systems/cmakepackage.rst b/lib/spack/docs/build_systems/cmakepackage.rst
index 3c3c96f92c..7ebac48734 100644
--- a/lib/spack/docs/build_systems/cmakepackage.rst
+++ b/lib/spack/docs/build_systems/cmakepackage.rst
@@ -145,6 +145,20 @@ and without the :meth:`~spack.build_systems.cmake.CMakePackage.define` and
return args
+Spack supports CMake defines from conditional variants too. Whenever the condition on
+the variant is not met, ``define_from_variant()`` will simply return an empty string,
+and CMake simply ignores the empty command line argument. For example the following
+
+.. code-block:: python
+
+ variant('example', default=True, when='@2.0:')
+
+ def cmake_args(self):
+ return [self.define_from_variant('EXAMPLE', 'example')]
+
+will generate ``'cmake' '-DEXAMPLE=ON' ...`` when ``@2.0: +example`` is met, but will
+result in ``'cmake' '' ...`` when the spec version is below ``2.0``.
+
^^^^^^^^^^
Generators
diff --git a/lib/spack/docs/build_systems/pythonpackage.rst b/lib/spack/docs/build_systems/pythonpackage.rst
index 30875d15f3..365c5d7bce 100644
--- a/lib/spack/docs/build_systems/pythonpackage.rst
+++ b/lib/spack/docs/build_systems/pythonpackage.rst
@@ -125,12 +125,15 @@ The zip file will not contain a ``setup.py``, but it will contain a
``METADATA`` file which contains all the information you need to
write a ``package.py`` build recipe.
+.. _pypi:
+
^^^^
PyPI
^^^^
-The vast majority of Python packages are hosted on PyPI - The Python
-Package Index. ``pip`` only supports packages hosted on PyPI, making
+The vast majority of Python packages are hosted on PyPI (The Python
+Package Index), which is :ref:`preferred over GitHub <pypi-vs-github>`
+for downloading packages. ``pip`` only supports packages hosted on PyPI, making
it the only option for developers who want a simple installation.
Search for "PyPI <package-name>" to find the download page. Note that
some pages are versioned, and the first result may not be the newest
@@ -217,6 +220,7 @@ try to extract the wheel:
version('1.11.0', sha256='d8c9d24ea90457214d798b0d922489863dad518adde3638e08ef62de28fb183a', expand=False)
+.. _pypi-vs-github:
"""""""""""""""
PyPI vs. GitHub
@@ -263,6 +267,9 @@ location, but PyPI is preferred for the following reasons:
PyPI is nice because it makes it physically impossible to
re-release the same version of a package with a different checksum.
+Use the :ref:`pypi attribute <pypi>` to facilitate construction of PyPI package
+references.
+
^^^^^^^^^^^^^^^^^^^^^^^^^
Build system dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -709,7 +716,7 @@ The package may have its own unit or regression tests. Spack can
run these tests during the installation by adding phase-appropriate
test methods.
-For example, ``py-numpy`` adds the following as a check to run
+For example, ``py-numpy`` adds the following as a check to run
after the ``install`` phase:
.. code-block:: python
diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py
index 2b58223e1d..d0a2bb9e33 100644
--- a/lib/spack/docs/conf.py
+++ b/lib/spack/docs/conf.py
@@ -30,6 +30,7 @@ from sphinx.ext.apidoc import main as sphinx_apidoc
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('_spack_root/lib/spack/external'))
+sys.path.insert(0, os.path.abspath('_spack_root/lib/spack/external/pytest-fallback'))
if sys.version_info[0] < 3:
sys.path.insert(
diff --git a/lib/spack/docs/contribution_guide.rst b/lib/spack/docs/contribution_guide.rst
index 17e24b816e..4d639a1c9b 100644
--- a/lib/spack/docs/contribution_guide.rst
+++ b/lib/spack/docs/contribution_guide.rst
@@ -71,7 +71,7 @@ locally to speed up the review process.
new release that is causing problems. If this is the case, please file an issue.
-We currently test against Python 2.6, 2.7, and 3.5-3.7 on both macOS and Linux and
+We currently test against Python 2.7 and 3.5-3.9 on both macOS and Linux and
perform 3 types of tests:
.. _cmd-spack-unit-test:
diff --git a/lib/spack/docs/developer_guide.rst b/lib/spack/docs/developer_guide.rst
index 2530b4ef03..e62b9fae5a 100644
--- a/lib/spack/docs/developer_guide.rst
+++ b/lib/spack/docs/developer_guide.rst
@@ -671,6 +671,13 @@ If you need to write a hook that is relevant to a failure within a build
process, you would want to instead use ``on_phase_failure``.
+"""""""""""""""""""""""""""
+``on_install_cancel(spec)``
+"""""""""""""""""""""""""""
+
+The same, but triggered if a spec install is cancelled for any reason.
+
+
"""""""""""""""""""""""""""""""""""""""""""""""
``on_phase_success(pkg, phase_name, log_file)``
"""""""""""""""""""""""""""""""""""""""""""""""
@@ -1177,6 +1184,10 @@ completed, the steps to make the major release are:
If CI is not passing, submit pull requests to ``develop`` as normal
and keep rebasing the release branch on ``develop`` until CI passes.
+#. Make sure the entire documentation is up to date. If documentation
+ is outdated submit pull requests to ``develop`` as normal
+ and keep rebasing the release branch on ``develop``.
+
#. Follow the steps in :ref:`publishing-releases`.
#. Follow the steps in :ref:`merging-releases`.
diff --git a/lib/spack/docs/module_file_support.rst b/lib/spack/docs/module_file_support.rst
index c6cbc4b22b..689b0415c6 100644
--- a/lib/spack/docs/module_file_support.rst
+++ b/lib/spack/docs/module_file_support.rst
@@ -273,29 +273,30 @@ of the installed software. For instance, in the snippet below:
.. code-block:: yaml
modules:
- tcl:
- # The keyword `all` selects every package
- all:
- environment:
- set:
- BAR: 'bar'
- # This anonymous spec selects any package that
- # depends on openmpi. The double colon at the
- # end clears the set of rules that matched so far.
- ^openmpi::
- environment:
- set:
- BAR: 'baz'
- # Selects any zlib package
- zlib:
- environment:
- prepend_path:
- LD_LIBRARY_PATH: 'foo'
- # Selects zlib compiled with gcc@4.8
- zlib%gcc@4.8:
- environment:
- unset:
- - FOOBAR
+ default:
+ tcl:
+ # The keyword `all` selects every package
+ all:
+ environment:
+ set:
+ BAR: 'bar'
+ # This anonymous spec selects any package that
+ # depends on openmpi. The double colon at the
+ # end clears the set of rules that matched so far.
+ ^openmpi::
+ environment:
+ set:
+ BAR: 'baz'
+ # Selects any zlib package
+ zlib:
+ environment:
+ prepend_path:
+ LD_LIBRARY_PATH: 'foo'
+ # Selects zlib compiled with gcc@4.8
+ zlib%gcc@4.8:
+ environment:
+ unset:
+ - FOOBAR
you are instructing Spack to set the environment variable ``BAR=bar`` for every module,
unless the associated spec satisfies ``^openmpi`` in which case ``BAR=baz``.
@@ -322,9 +323,10 @@ your system. If you write a configuration file like:
.. code-block:: yaml
modules:
- tcl:
- whitelist: ['gcc', 'llvm'] # Whitelist will have precedence over blacklist
- blacklist: ['%gcc@4.4.7'] # Assuming gcc@4.4.7 is the system compiler
+ default:
+ tcl:
+ whitelist: ['gcc', 'llvm'] # Whitelist will have precedence over blacklist
+ blacklist: ['%gcc@4.4.7'] # Assuming gcc@4.4.7 is the system compiler
you will prevent the generation of module files for any package that
is compiled with ``gcc@4.4.7``, with the only exception of any ``gcc``
@@ -349,8 +351,9 @@ shows how to set hash length in the module file names:
.. code-block:: yaml
modules:
- tcl:
- hash_length: 7
+ default:
+ tcl:
+ hash_length: 7
To help make module names more readable, and to help alleviate name conflicts
with a short hash, one can use the ``suffixes`` option in the modules
@@ -360,11 +363,12 @@ For instance, the following config options,
.. code-block:: yaml
modules:
- tcl:
- all:
- suffixes:
- ^python@2.7.12: 'python-2.7.12'
- ^openblas: 'openblas'
+ default:
+ tcl:
+ all:
+ suffixes:
+ ^python@2.7.12: 'python-2.7.12'
+ ^openblas: 'openblas'
will add a ``python-2.7.12`` version string to any packages compiled with
python matching the spec, ``python@2.7.12``. This is useful to know which
@@ -379,10 +383,11 @@ covered in :ref:`adding_projections_to_views`.
.. code-block:: yaml
modules:
- tcl:
- projections:
- all: '{name}/{version}-{compiler.name}-{compiler.version}-module'
- ^mpi: '{name}/{version}-{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}-module'
+ default:
+ tcl:
+ projections:
+ all: '{name}/{version}-{compiler.name}-{compiler.version}-module'
+ ^mpi: '{name}/{version}-{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}-module'
will create module files that are nested in directories by package
name, contain the version and compiler name and version, and have the
@@ -403,15 +408,16 @@ that are already in the LMod hierarchy.
.. code-block:: yaml
modules:
- enable:
- - tcl
- tcl:
- projections:
- all: '{name}/{version}-{compiler.name}-{compiler.version}'
- all:
- conflict:
- - '{name}'
- - 'intel/14.0.1'
+ default:
+ enable:
+ - tcl
+ tcl:
+ projections:
+ all: '{name}/{version}-{compiler.name}-{compiler.version}'
+ all:
+ conflict:
+ - '{name}'
+ - 'intel/14.0.1'
will create module files that will conflict with ``intel/14.0.1`` and with the
base directory of the same module, effectively preventing the possibility to
@@ -431,16 +437,17 @@ that are already in the LMod hierarchy.
.. code-block:: yaml
modules:
- enable:
- - lmod
- lmod:
- core_compilers:
- - 'gcc@4.8'
- core_specs:
- - 'python'
- hierarchy:
- - 'mpi'
- - 'lapack'
+ default:
+ enable:
+ - lmod
+ lmod:
+ core_compilers:
+ - 'gcc@4.8'
+ core_specs:
+ - 'python'
+ hierarchy:
+ - 'mpi'
+ - 'lapack'
that will generate a hierarchy in which the ``lapack`` and ``mpi`` layer can be switched
independently. This allows a site to build the same libraries or applications against different
@@ -591,11 +598,12 @@ do so by using the environment blacklist:
.. code-block:: yaml
modules:
- tcl:
- all:
- filter:
- # Exclude changes to any of these variables
- environment_blacklist: ['CPATH', 'LIBRARY_PATH']
+ default:
+ tcl:
+ all:
+ filter:
+ # Exclude changes to any of these variables
+ environment_blacklist: ['CPATH', 'LIBRARY_PATH']
The configuration above will generate module files that will not contain
modifications to either ``CPATH`` or ``LIBRARY_PATH``.
@@ -614,9 +622,10 @@ activated using ``spack activate``:
.. code-block:: yaml
modules:
- tcl:
- ^python:
- autoload: 'direct'
+ default:
+ tcl:
+ ^python:
+ autoload: 'direct'
The configuration file above will produce module files that will
load their direct dependencies if the package installed depends on ``python``.
@@ -633,9 +642,10 @@ The allowed values for the ``autoload`` statement are either ``none``,
.. code-block:: yaml
modules:
- lmod:
- all:
- autoload: 'direct'
+ default:
+ lmod:
+ all:
+ autoload: 'direct'
.. note::
TCL prerequisites
diff --git a/lib/spack/docs/tables/system_prerequisites.csv b/lib/spack/docs/tables/system_prerequisites.csv
index 074be4bffb..980aea77f6 100644
--- a/lib/spack/docs/tables/system_prerequisites.csv
+++ b/lib/spack/docs/tables/system_prerequisites.csv
@@ -1,5 +1,5 @@
Name, Supported Versions, Notes, Requirement Reason
-Python, 2.6/2.7/3.5-3.9, , Interpreter for Spack
+Python, 2.7/3.5-3.9, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index c498db0583..59ff0001e1 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -248,7 +248,7 @@ case "$command" in
lang_flags=C
debug_flags="-g"
;;
- c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC)
+ c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC)
command="$SPACK_CXX"
language="C++"
comp="CXX"
diff --git a/lib/spack/env/oneapi/dpcpp b/lib/spack/env/oneapi/dpcpp
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/oneapi/dpcpp
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py
index be4b9fbf14..c4c4855fe0 100644
--- a/lib/spack/external/__init__.py
+++ b/lib/spack/external/__init__.py
@@ -17,13 +17,20 @@ argparse
--------
* Homepage: https://pypi.python.org/pypi/argparse
-* Usage: We include our own version to be Python 2.6 compatible.
+* Usage: We include our own version to be Python 3.X compatible.
* Version: 1.4.0
* Note: This package has been slightly modified to improve
error message formatting. See the following commit if the
vendored copy ever needs to be updated again:
https://github.com/spack/spack/pull/6786/commits/dfcef577b77249106ea4e4c69a6cd9e64fa6c418
+attrs
+----------------
+
+* Homepage: https://github.com/python-attrs/attrs
+* Usage: Needed by jsonschema.
+* Version: 21.2.0 (83d3cd70f90a3f4d19ee8b508e58d1c58821c0ad)
+
ctest_log_parser
----------------
@@ -37,49 +44,36 @@ distro
* Homepage: https://pypi.python.org/pypi/distro
* Usage: Provides a more stable linux distribution detection.
-* Version: 1.0.4 (last version supporting Python 2.6)
+* Version: 1.6.0 (64946a1e2a9ff529047070657728600e006c99ff)
+* Note: Last version supporting Python 2.7
-functools
----------
-
-* Homepage: https://github.com/python/cpython/blob/2.7/Lib/functools.py
-* Usage: Used for implementation of total_ordering.
-* Version: Unversioned
-* Note: This is the functools.total_ordering implementation
- from Python 2.7 backported so we can run on Python 2.6.
+functools32
+-----------
+* Homepage: https://github.com/MiCHiLU/python-functools32
+* Usage: Needed by jsonschema when using Python 2.7.
+* Version: 3.2.3-2
jinja2
------
* Homepage: https://pypi.python.org/pypi/Jinja2
* Usage: A modern and designer-friendly templating language for Python.
-* Version: 2.10
+* Version: 2.11.3 (last version supporting Python 2.7)
jsonschema
----------
* Homepage: https://pypi.python.org/pypi/jsonschema
* Usage: An implementation of JSON Schema for Python.
-* Version: 2.4.0 (last version before functools32 dependency was added)
-* Note: functools32 doesn't support Python 2.6 or 3.0, so jsonschema
- cannot be upgraded any further until we drop 2.6.
- Also, jsonschema/validators.py has been modified NOT to try to import
- requests (see 7a1dd517b8).
+* Version: 3.2.0 (last version before 2.7 and 3.6 support was dropped)
+* Note: We don't include tests or benchmarks; just what Spack needs.
markupsafe
----------
* Homepage: https://pypi.python.org/pypi/MarkupSafe
* Usage: Implements a XML/HTML/XHTML Markup safe string for Python.
-* Version: 1.0
-
-orderddict
-----------
-
-* Homepage: https://pypi.org/project/ordereddict/
-* Usage: A drop-in substitute for Py2.7's new collections.OrderedDict
- that works in Python 2.4-2.6.
-* Version: 1.1
+* Version: 1.1.1 (last version supporting Python 2.7)
py
--
@@ -91,6 +85,14 @@ py
* Note: This packages has been modified:
* https://github.com/pytest-dev/py/pull/186 was backported
+pyrsistent
+----------
+
+* Homepage: http://github.com/tobgu/pyrsistent/
+* Usage: Needed by `jsonschema`
+* Version: 0.16.1 (last version supporting Python 2.7)
+* Note: We only include the parts needed for `jsonschema`.
+
pytest
------
@@ -120,7 +122,7 @@ six
* Homepage: https://pypi.python.org/pypi/six
* Usage: Python 2 and 3 compatibility utilities.
-* Version: 1.11.0
+* Version: 1.16.0
macholib
--------
diff --git a/lib/spack/external/attr/LICENSE b/lib/spack/external/attr/LICENSE
new file mode 100644
index 0000000000..7ae3df9309
--- /dev/null
+++ b/lib/spack/external/attr/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Hynek Schlawack
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/lib/spack/external/attr/__init__.py b/lib/spack/external/attr/__init__.py
new file mode 100644
index 0000000000..b1ce7fe248
--- /dev/null
+++ b/lib/spack/external/attr/__init__.py
@@ -0,0 +1,78 @@
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+from functools import partial
+
+from . import converters, exceptions, filters, setters, validators
+from ._cmp import cmp_using
+from ._config import get_run_validators, set_run_validators
+from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
+from ._make import (
+ NOTHING,
+ Attribute,
+ Factory,
+ attrib,
+ attrs,
+ fields,
+ fields_dict,
+ make_class,
+ validate,
+)
+from ._version_info import VersionInfo
+
+
+__version__ = "21.2.0"
+__version_info__ = VersionInfo._from_version_string(__version__)
+
+__title__ = "attrs"
+__description__ = "Classes Without Boilerplate"
+__url__ = "https://www.attrs.org/"
+__uri__ = __url__
+__doc__ = __description__ + " <" + __uri__ + ">"
+
+__author__ = "Hynek Schlawack"
+__email__ = "hs@ox.cx"
+
+__license__ = "MIT"
+__copyright__ = "Copyright (c) 2015 Hynek Schlawack"
+
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
+
+__all__ = [
+ "Attribute",
+ "Factory",
+ "NOTHING",
+ "asdict",
+ "assoc",
+ "astuple",
+ "attr",
+ "attrib",
+ "attributes",
+ "attrs",
+ "cmp_using",
+ "converters",
+ "evolve",
+ "exceptions",
+ "fields",
+ "fields_dict",
+ "filters",
+ "get_run_validators",
+ "has",
+ "ib",
+ "make_class",
+ "resolve_types",
+ "s",
+ "set_run_validators",
+ "setters",
+ "validate",
+ "validators",
+]
+
+if sys.version_info[:2] >= (3, 6):
+ from ._next_gen import define, field, frozen, mutable
+
+ __all__.extend((define, field, frozen, mutable))
diff --git a/lib/spack/external/attr/_cmp.py b/lib/spack/external/attr/_cmp.py
new file mode 100644
index 0000000000..b747b603f1
--- /dev/null
+++ b/lib/spack/external/attr/_cmp.py
@@ -0,0 +1,152 @@
+from __future__ import absolute_import, division, print_function
+
+import functools
+
+from ._compat import new_class
+from ._make import _make_ne
+
+
+_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
+
+
+def cmp_using(
+ eq=None,
+ lt=None,
+ le=None,
+ gt=None,
+ ge=None,
+ require_same_type=True,
+ class_name="Comparable",
+):
+ """
+ Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and
+ ``cmp`` arguments to customize field comparison.
+
+ The resulting class will have a full set of ordering methods if
+ at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided.
+
+ :param Optional[callable] eq: `callable` used to evaluate equality
+ of two objects.
+ :param Optional[callable] lt: `callable` used to evaluate whether
+ one object is less than another object.
+ :param Optional[callable] le: `callable` used to evaluate whether
+ one object is less than or equal to another object.
+ :param Optional[callable] gt: `callable` used to evaluate whether
+ one object is greater than another object.
+ :param Optional[callable] ge: `callable` used to evaluate whether
+ one object is greater than or equal to another object.
+
+ :param bool require_same_type: When `True`, equality and ordering methods
+ will return `NotImplemented` if objects are not of the same type.
+
+ :param Optional[str] class_name: Name of class. Defaults to 'Comparable'.
+
+ See `comparison` for more details.
+
+ .. versionadded:: 21.1.0
+ """
+
+ body = {
+ "__slots__": ["value"],
+ "__init__": _make_init(),
+ "_requirements": [],
+ "_is_comparable_to": _is_comparable_to,
+ }
+
+ # Add operations.
+ num_order_functions = 0
+ has_eq_function = False
+
+ if eq is not None:
+ has_eq_function = True
+ body["__eq__"] = _make_operator("eq", eq)
+ body["__ne__"] = _make_ne()
+
+ if lt is not None:
+ num_order_functions += 1
+ body["__lt__"] = _make_operator("lt", lt)
+
+ if le is not None:
+ num_order_functions += 1
+ body["__le__"] = _make_operator("le", le)
+
+ if gt is not None:
+ num_order_functions += 1
+ body["__gt__"] = _make_operator("gt", gt)
+
+ if ge is not None:
+ num_order_functions += 1
+ body["__ge__"] = _make_operator("ge", ge)
+
+ type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body))
+
+ # Add same type requirement.
+ if require_same_type:
+ type_._requirements.append(_check_same_type)
+
+ # Add total ordering if at least one operation was defined.
+ if 0 < num_order_functions < 4:
+ if not has_eq_function:
+ # functools.total_ordering requires __eq__ to be defined,
+ # so raise early error here to keep a nice stack.
+ raise ValueError(
+ "eq must be define is order to complete ordering from "
+ "lt, le, gt, ge."
+ )
+ type_ = functools.total_ordering(type_)
+
+ return type_
+
+
+def _make_init():
+ """
+ Create __init__ method.
+ """
+
+ def __init__(self, value):
+ """
+ Initialize object with *value*.
+ """
+ self.value = value
+
+ return __init__
+
+
+def _make_operator(name, func):
+ """
+ Create operator method.
+ """
+
+ def method(self, other):
+ if not self._is_comparable_to(other):
+ return NotImplemented
+
+ result = func(self.value, other.value)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return result
+
+ method.__name__ = "__%s__" % (name,)
+ method.__doc__ = "Return a %s b. Computed by attrs." % (
+ _operation_names[name],
+ )
+
+ return method
+
+
+def _is_comparable_to(self, other):
+ """
+ Check whether `other` is comparable to `self`.
+ """
+ for func in self._requirements:
+ if not func(self, other):
+ return False
+ return True
+
+
+def _check_same_type(self, other):
+ """
+ Return True if *self* and *other* are of the same type, False otherwise.
+ """
+ return other.value.__class__ is self.value.__class__
diff --git a/lib/spack/external/attr/_compat.py b/lib/spack/external/attr/_compat.py
new file mode 100644
index 0000000000..6939f338da
--- /dev/null
+++ b/lib/spack/external/attr/_compat.py
@@ -0,0 +1,242 @@
+from __future__ import absolute_import, division, print_function
+
+import platform
+import sys
+import types
+import warnings
+
+
+PY2 = sys.version_info[0] == 2
+PYPY = platform.python_implementation() == "PyPy"
+
+
+if PYPY or sys.version_info[:2] >= (3, 6):
+ ordered_dict = dict
+else:
+ from collections import OrderedDict
+
+ ordered_dict = OrderedDict
+
+
+if PY2:
+ from collections import Mapping, Sequence
+
+ from UserDict import IterableUserDict
+
+ # We 'bundle' isclass instead of using inspect as importing inspect is
+ # fairly expensive (order of 10-15 ms for a modern machine in 2016)
+ def isclass(klass):
+ return isinstance(klass, (type, types.ClassType))
+
+ def new_class(name, bases, kwds, exec_body):
+ """
+ A minimal stub of types.new_class that we need for make_class.
+ """
+ ns = {}
+ exec_body(ns)
+
+ return type(name, bases, ns)
+
+ # TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
+ TYPE = "type"
+
+ def iteritems(d):
+ return d.iteritems()
+
+ # Python 2 is bereft of a read-only dict proxy, so we make one!
+ class ReadOnlyDict(IterableUserDict):
+ """
+ Best-effort read-only dict wrapper.
+ """
+
+ def __setitem__(self, key, val):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise TypeError(
+ "'mappingproxy' object does not support item assignment"
+ )
+
+ def update(self, _):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'update'"
+ )
+
+ def __delitem__(self, _):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise TypeError(
+ "'mappingproxy' object does not support item deletion"
+ )
+
+ def clear(self):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'clear'"
+ )
+
+ def pop(self, key, default=None):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'pop'"
+ )
+
+ def popitem(self):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'popitem'"
+ )
+
+ def setdefault(self, key, default=None):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'setdefault'"
+ )
+
+ def __repr__(self):
+ # Override to be identical to the Python 3 version.
+ return "mappingproxy(" + repr(self.data) + ")"
+
+ def metadata_proxy(d):
+ res = ReadOnlyDict()
+ res.data.update(d) # We blocked update, so we have to do it like this.
+ return res
+
+ def just_warn(*args, **kw): # pragma: no cover
+ """
+ We only warn on Python 3 because we are not aware of any concrete
+ consequences of not setting the cell on Python 2.
+ """
+
+
+else: # Python 3 and later.
+ from collections.abc import Mapping, Sequence # noqa
+
+ def just_warn(*args, **kw):
+ """
+ We only warn on Python 3 because we are not aware of any concrete
+ consequences of not setting the cell on Python 2.
+ """
+ warnings.warn(
+ "Running interpreter doesn't sufficiently support code object "
+ "introspection. Some features like bare super() or accessing "
+ "__class__ will not work with slotted classes.",
+ RuntimeWarning,
+ stacklevel=2,
+ )
+
+ def isclass(klass):
+ return isinstance(klass, type)
+
+ TYPE = "class"
+
+ def iteritems(d):
+ return d.items()
+
+ new_class = types.new_class
+
+ def metadata_proxy(d):
+ return types.MappingProxyType(dict(d))
+
+
+def make_set_closure_cell():
+ """Return a function of two arguments (cell, value) which sets
+ the value stored in the closure cell `cell` to `value`.
+ """
+ # pypy makes this easy. (It also supports the logic below, but
+ # why not do the easy/fast thing?)
+ if PYPY:
+
+ def set_closure_cell(cell, value):
+ cell.__setstate__((value,))
+
+ return set_closure_cell
+
+ # Otherwise gotta do it the hard way.
+
+ # Create a function that will set its first cellvar to `value`.
+ def set_first_cellvar_to(value):
+ x = value
+ return
+
+ # This function will be eliminated as dead code, but
+ # not before its reference to `x` forces `x` to be
+ # represented as a closure cell rather than a local.
+ def force_x_to_be_a_cell(): # pragma: no cover
+ return x
+
+ try:
+ # Extract the code object and make sure our assumptions about
+ # the closure behavior are correct.
+ if PY2:
+ co = set_first_cellvar_to.func_code
+ else:
+ co = set_first_cellvar_to.__code__
+ if co.co_cellvars != ("x",) or co.co_freevars != ():
+ raise AssertionError # pragma: no cover
+
+ # Convert this code object to a code object that sets the
+ # function's first _freevar_ (not cellvar) to the argument.
+ if sys.version_info >= (3, 8):
+ # CPython 3.8+ has an incompatible CodeType signature
+ # (added a posonlyargcount argument) but also added
+ # CodeType.replace() to do this without counting parameters.
+ set_first_freevar_code = co.replace(
+ co_cellvars=co.co_freevars, co_freevars=co.co_cellvars
+ )
+ else:
+ args = [co.co_argcount]
+ if not PY2:
+ args.append(co.co_kwonlyargcount)
+ args.extend(
+ [
+ co.co_nlocals,
+ co.co_stacksize,
+ co.co_flags,
+ co.co_code,
+ co.co_consts,
+ co.co_names,
+ co.co_varnames,
+ co.co_filename,
+ co.co_name,
+ co.co_firstlineno,
+ co.co_lnotab,
+ # These two arguments are reversed:
+ co.co_cellvars,
+ co.co_freevars,
+ ]
+ )
+ set_first_freevar_code = types.CodeType(*args)
+
+ def set_closure_cell(cell, value):
+ # Create a function using the set_first_freevar_code,
+ # whose first closure cell is `cell`. Calling it will
+ # change the value of that cell.
+ setter = types.FunctionType(
+ set_first_freevar_code, {}, "setter", (), (cell,)
+ )
+ # And call it to set the cell.
+ setter(value)
+
+ # Make sure it works on this interpreter:
+ def make_func_with_cell():
+ x = None
+
+ def func():
+ return x # pragma: no cover
+
+ return func
+
+ if PY2:
+ cell = make_func_with_cell().func_closure[0]
+ else:
+ cell = make_func_with_cell().__closure__[0]
+ set_closure_cell(cell, 100)
+ if cell.cell_contents != 100:
+ raise AssertionError # pragma: no cover
+
+ except Exception:
+ return just_warn
+ else:
+ return set_closure_cell
+
+
+set_closure_cell = make_set_closure_cell()
diff --git a/lib/spack/external/attr/_config.py b/lib/spack/external/attr/_config.py
new file mode 100644
index 0000000000..8ec920962d
--- /dev/null
+++ b/lib/spack/external/attr/_config.py
@@ -0,0 +1,23 @@
+from __future__ import absolute_import, division, print_function
+
+
+__all__ = ["set_run_validators", "get_run_validators"]
+
+_run_validators = True
+
+
+def set_run_validators(run):
+ """
+ Set whether or not validators are run. By default, they are run.
+ """
+ if not isinstance(run, bool):
+ raise TypeError("'run' must be bool.")
+ global _run_validators
+ _run_validators = run
+
+
+def get_run_validators():
+ """
+ Return whether or not validators are run.
+ """
+ return _run_validators
diff --git a/lib/spack/external/attr/_funcs.py b/lib/spack/external/attr/_funcs.py
new file mode 100644
index 0000000000..fda508c5c4
--- /dev/null
+++ b/lib/spack/external/attr/_funcs.py
@@ -0,0 +1,395 @@
+from __future__ import absolute_import, division, print_function
+
+import copy
+
+from ._compat import iteritems
+from ._make import NOTHING, _obj_setattr, fields
+from .exceptions import AttrsAttributeNotFoundError
+
+
+def asdict(
+ inst,
+ recurse=True,
+ filter=None,
+ dict_factory=dict,
+ retain_collection_types=False,
+ value_serializer=None,
+):
+ """
+ Return the ``attrs`` attribute values of *inst* as a dict.
+
+ Optionally recurse into other ``attrs``-decorated classes.
+
+ :param inst: Instance of an ``attrs``-decorated class.
+ :param bool recurse: Recurse into classes that are also
+ ``attrs``-decorated.
+ :param callable filter: A callable whose return code determines whether an
+ attribute or element is included (``True``) or dropped (``False``). Is
+ called with the `attr.Attribute` as the first argument and the
+ value as the second argument.
+ :param callable dict_factory: A callable to produce dictionaries from. For
+ example, to produce ordered dictionaries instead of normal Python
+ dictionaries, pass in ``collections.OrderedDict``.
+ :param bool retain_collection_types: Do not convert to ``list`` when
+ encountering an attribute whose type is ``tuple`` or ``set``. Only
+ meaningful if ``recurse`` is ``True``.
+ :param Optional[callable] value_serializer: A hook that is called for every
+ attribute or dict key/value. It receives the current instance, field
+ and value and must return the (updated) value. The hook is run *after*
+ the optional *filter* has been applied.
+
+ :rtype: return type of *dict_factory*
+
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 16.0.0 *dict_factory*
+ .. versionadded:: 16.1.0 *retain_collection_types*
+ .. versionadded:: 20.3.0 *value_serializer*
+ """
+ attrs = fields(inst.__class__)
+ rv = dict_factory()
+ for a in attrs:
+ v = getattr(inst, a.name)
+ if filter is not None and not filter(a, v):
+ continue
+
+ if value_serializer is not None:
+ v = value_serializer(inst, a, v)
+
+ if recurse is True:
+ if has(v.__class__):
+ rv[a.name] = asdict(
+ v,
+ True,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+ )
+ elif isinstance(v, (tuple, list, set, frozenset)):
+ cf = v.__class__ if retain_collection_types is True else list
+ rv[a.name] = cf(
+ [
+ _asdict_anything(
+ i,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+ )
+ for i in v
+ ]
+ )
+ elif isinstance(v, dict):
+ df = dict_factory
+ rv[a.name] = df(
+ (
+ _asdict_anything(
+ kk,
+ filter,
+ df,
+ retain_collection_types,
+ value_serializer,
+ ),
+ _asdict_anything(
+ vv,
+ filter,
+ df,
+ retain_collection_types,
+ value_serializer,
+ ),
+ )
+ for kk, vv in iteritems(v)
+ )
+ else:
+ rv[a.name] = v
+ else:
+ rv[a.name] = v
+ return rv
+
+
+def _asdict_anything(
+ val,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+):
+ """
+ ``asdict`` only works on attrs instances, this works on anything.
+ """
+ if getattr(val.__class__, "__attrs_attrs__", None) is not None:
+ # Attrs class.
+ rv = asdict(
+ val,
+ True,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+ )
+ elif isinstance(val, (tuple, list, set, frozenset)):
+ cf = val.__class__ if retain_collection_types is True else list
+ rv = cf(
+ [
+ _asdict_anything(
+ i,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+ )
+ for i in val
+ ]
+ )
+ elif isinstance(val, dict):
+ df = dict_factory
+ rv = df(
+ (
+ _asdict_anything(
+ kk, filter, df, retain_collection_types, value_serializer
+ ),
+ _asdict_anything(
+ vv, filter, df, retain_collection_types, value_serializer
+ ),
+ )
+ for kk, vv in iteritems(val)
+ )
+ else:
+ rv = val
+ if value_serializer is not None:
+ rv = value_serializer(None, None, rv)
+
+ return rv
+
+
+def astuple(
+ inst,
+ recurse=True,
+ filter=None,
+ tuple_factory=tuple,
+ retain_collection_types=False,
+):
+ """
+ Return the ``attrs`` attribute values of *inst* as a tuple.
+
+ Optionally recurse into other ``attrs``-decorated classes.
+
+ :param inst: Instance of an ``attrs``-decorated class.
+ :param bool recurse: Recurse into classes that are also
+ ``attrs``-decorated.
+ :param callable filter: A callable whose return code determines whether an
+ attribute or element is included (``True``) or dropped (``False``). Is
+ called with the `attr.Attribute` as the first argument and the
+ value as the second argument.
+ :param callable tuple_factory: A callable to produce tuples from. For
+ example, to produce lists instead of tuples.
+ :param bool retain_collection_types: Do not convert to ``list``
+ or ``dict`` when encountering an attribute which type is
+ ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
+ ``True``.
+
+ :rtype: return type of *tuple_factory*
+
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 16.2.0
+ """
+ attrs = fields(inst.__class__)
+ rv = []
+ retain = retain_collection_types # Very long. :/
+ for a in attrs:
+ v = getattr(inst, a.name)
+ if filter is not None and not filter(a, v):
+ continue
+ if recurse is True:
+ if has(v.__class__):
+ rv.append(
+ astuple(
+ v,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ )
+ elif isinstance(v, (tuple, list, set, frozenset)):
+ cf = v.__class__ if retain is True else list
+ rv.append(
+ cf(
+ [
+ astuple(
+ j,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(j.__class__)
+ else j
+ for j in v
+ ]
+ )
+ )
+ elif isinstance(v, dict):
+ df = v.__class__ if retain is True else dict
+ rv.append(
+ df(
+ (
+ astuple(
+ kk,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(kk.__class__)
+ else kk,
+ astuple(
+ vv,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(vv.__class__)
+ else vv,
+ )
+ for kk, vv in iteritems(v)
+ )
+ )
+ else:
+ rv.append(v)
+ else:
+ rv.append(v)
+
+ return rv if tuple_factory is list else tuple_factory(rv)
+
+
+def has(cls):
+ """
+ Check whether *cls* is a class with ``attrs`` attributes.
+
+ :param type cls: Class to introspect.
+ :raise TypeError: If *cls* is not a class.
+
+ :rtype: bool
+ """
+ return getattr(cls, "__attrs_attrs__", None) is not None
+
+
+def assoc(inst, **changes):
+ """
+ Copy *inst* and apply *changes*.
+
+ :param inst: Instance of a class with ``attrs`` attributes.
+ :param changes: Keyword changes in the new copy.
+
+ :return: A copy of inst with *changes* incorporated.
+
+ :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
+ be found on *cls*.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. deprecated:: 17.1.0
+ Use `evolve` instead.
+ """
+ import warnings
+
+ warnings.warn(
+ "assoc is deprecated and will be removed after 2018/01.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ new = copy.copy(inst)
+ attrs = fields(inst.__class__)
+ for k, v in iteritems(changes):
+ a = getattr(attrs, k, NOTHING)
+ if a is NOTHING:
+ raise AttrsAttributeNotFoundError(
+ "{k} is not an attrs attribute on {cl}.".format(
+ k=k, cl=new.__class__
+ )
+ )
+ _obj_setattr(new, k, v)
+ return new
+
+
+def evolve(inst, **changes):
+ """
+ Create a new instance, based on *inst* with *changes* applied.
+
+ :param inst: Instance of a class with ``attrs`` attributes.
+ :param changes: Keyword changes in the new copy.
+
+ :return: A copy of inst with *changes* incorporated.
+
+ :raise TypeError: If *attr_name* couldn't be found in the class
+ ``__init__``.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 17.1.0
+ """
+ cls = inst.__class__
+ attrs = fields(cls)
+ for a in attrs:
+ if not a.init:
+ continue
+ attr_name = a.name # To deal with private attributes.
+ init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
+ if init_name not in changes:
+ changes[init_name] = getattr(inst, attr_name)
+
+ return cls(**changes)
+
+
+def resolve_types(cls, globalns=None, localns=None, attribs=None):
+ """
+ Resolve any strings and forward annotations in type annotations.
+
+ This is only required if you need concrete types in `Attribute`'s *type*
+ field. In other words, you don't need to resolve your types if you only
+ use them for static type checking.
+
+ With no arguments, names will be looked up in the module in which the class
+ was created. If this is not what you want, e.g. if the name only exists
+ inside a method, you may pass *globalns* or *localns* to specify other
+ dictionaries in which to look up these names. See the docs of
+ `typing.get_type_hints` for more details.
+
+ :param type cls: Class to resolve.
+ :param Optional[dict] globalns: Dictionary containing global variables.
+ :param Optional[dict] localns: Dictionary containing local variables.
+ :param Optional[list] attribs: List of attribs for the given class.
+ This is necessary when calling from inside a ``field_transformer``
+ since *cls* is not an ``attrs`` class yet.
+
+ :raise TypeError: If *cls* is not a class.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class and you didn't pass any attribs.
+ :raise NameError: If types cannot be resolved because of missing variables.
+
+ :returns: *cls* so you can use this function also as a class decorator.
+ Please note that you have to apply it **after** `attr.s`. That means
+ the decorator has to come in the line **before** `attr.s`.
+
+ .. versionadded:: 20.1.0
+ .. versionadded:: 21.1.0 *attribs*
+
+ """
+ try:
+ # Since calling get_type_hints is expensive we cache whether we've
+ # done it already.
+ cls.__attrs_types_resolved__
+ except AttributeError:
+ import typing
+
+ hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
+ for field in fields(cls) if attribs is None else attribs:
+ if field.name in hints:
+ # Since fields have been frozen we must work around it.
+ _obj_setattr(field, "type", hints[field.name])
+ cls.__attrs_types_resolved__ = True
+
+ # Return the class so you can use it as a decorator too.
+ return cls
diff --git a/lib/spack/external/attr/_make.py b/lib/spack/external/attr/_make.py
new file mode 100644
index 0000000000..a1912b1233
--- /dev/null
+++ b/lib/spack/external/attr/_make.py
@@ -0,0 +1,3052 @@
+from __future__ import absolute_import, division, print_function
+
+import copy
+import inspect
+import linecache
+import sys
+import threading
+import uuid
+import warnings
+
+from operator import itemgetter
+
+from . import _config, setters
+from ._compat import (
+ PY2,
+ PYPY,
+ isclass,
+ iteritems,
+ metadata_proxy,
+ new_class,
+ ordered_dict,
+ set_closure_cell,
+)
+from .exceptions import (
+ DefaultAlreadySetError,
+ FrozenInstanceError,
+ NotAnAttrsClassError,
+ PythonTooOldError,
+ UnannotatedAttributeError,
+)
+
+
+if not PY2:
+ import typing
+
+
+# This is used at least twice, so cache it here.
+_obj_setattr = object.__setattr__
+_init_converter_pat = "__attr_converter_%s"
+_init_factory_pat = "__attr_factory_{}"
+_tuple_property_pat = (
+ " {attr_name} = _attrs_property(_attrs_itemgetter({index}))"
+)
+_classvar_prefixes = (
+ "typing.ClassVar",
+ "t.ClassVar",
+ "ClassVar",
+ "typing_extensions.ClassVar",
+)
+# we don't use a double-underscore prefix because that triggers
+# name mangling when trying to create a slot for the field
+# (when slots=True)
+_hash_cache_field = "_attrs_cached_hash"
+
+_empty_metadata_singleton = metadata_proxy({})
+
+# Unique object for unequivocal getattr() defaults.
+_sentinel = object()
+
+
+class _Nothing(object):
+ """
+ Sentinel class to indicate the lack of a value when ``None`` is ambiguous.
+
+ ``_Nothing`` is a singleton. There is only ever one of it.
+
+ .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
+ """
+
+ _singleton = None
+
+ def __new__(cls):
+ if _Nothing._singleton is None:
+ _Nothing._singleton = super(_Nothing, cls).__new__(cls)
+ return _Nothing._singleton
+
+ def __repr__(self):
+ return "NOTHING"
+
+ def __bool__(self):
+ return False
+
+ def __len__(self):
+ return 0 # __bool__ for Python 2
+
+
+NOTHING = _Nothing()
+"""
+Sentinel to indicate the lack of a value when ``None`` is ambiguous.
+"""
+
+
+class _CacheHashWrapper(int):
+ """
+ An integer subclass that pickles / copies as None
+
+ This is used for non-slots classes with ``cache_hash=True``, to avoid
+ serializing a potentially (even likely) invalid hash value. Since ``None``
+ is the default value for uncalculated hashes, whenever this is copied,
+ the copy's value for the hash should automatically reset.
+
+ See GH #613 for more details.
+ """
+
+ if PY2:
+ # For some reason `type(None)` isn't callable in Python 2, but we don't
+ # actually need a constructor for None objects, we just need any
+ # available function that returns None.
+ def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)):
+ return _none_constructor, _args
+
+ else:
+
+ def __reduce__(self, _none_constructor=type(None), _args=()):
+ return _none_constructor, _args
+
+
+def attrib(
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=None,
+ init=True,
+ metadata=None,
+ type=None,
+ converter=None,
+ factory=None,
+ kw_only=False,
+ eq=None,
+ order=None,
+ on_setattr=None,
+):
+ """
+ Create a new attribute on a class.
+
+ .. warning::
+
+ Does *not* do anything unless the class is also decorated with
+ `attr.s`!
+
+ :param default: A value that is used if an ``attrs``-generated ``__init__``
+ is used and no value is passed while instantiating or the attribute is
+ excluded using ``init=False``.
+
+ If the value is an instance of `Factory`, its callable will be
+ used to construct a new value (useful for mutable data types like lists
+ or dicts).
+
+ If a default is not set (or set manually to `attr.NOTHING`), a value
+ *must* be supplied when instantiating; otherwise a `TypeError`
+ will be raised.
+
+ The default can also be set using decorator notation as shown below.
+
+ :type default: Any value
+
+ :param callable factory: Syntactic sugar for
+ ``default=attr.Factory(factory)``.
+
+ :param validator: `callable` that is called by ``attrs``-generated
+ ``__init__`` methods after the instance has been initialized. They
+ receive the initialized instance, the `Attribute`, and the
+ passed value.
+
+ The return value is *not* inspected so the validator has to throw an
+ exception itself.
+
+ If a `list` is passed, its items are treated as validators and must
+ all pass.
+
+ Validators can be globally disabled and re-enabled using
+ `get_run_validators`.
+
+ The validator can also be set using decorator notation as shown below.
+
+ :type validator: `callable` or a `list` of `callable`\\ s.
+
+ :param repr: Include this attribute in the generated ``__repr__``
+ method. If ``True``, include the attribute; if ``False``, omit it. By
+ default, the built-in ``repr()`` function is used. To override how the
+ attribute value is formatted, pass a ``callable`` that takes a single
+ value and returns a string. Note that the resulting string is used
+ as-is, i.e. it will be used directly *instead* of calling ``repr()``
+ (the default).
+ :type repr: a `bool` or a `callable` to use a custom function.
+
+ :param eq: If ``True`` (default), include this attribute in the
+ generated ``__eq__`` and ``__ne__`` methods that check two instances
+ for equality. To override how the attribute value is compared,
+ pass a ``callable`` that takes a single value and returns the value
+ to be compared.
+ :type eq: a `bool` or a `callable`.
+
+ :param order: If ``True`` (default), include this attributes in the
+ generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods.
+ To override how the attribute value is ordered,
+ pass a ``callable`` that takes a single value and returns the value
+ to be ordered.
+ :type order: a `bool` or a `callable`.
+
+ :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the
+ same value. Must not be mixed with *eq* or *order*.
+ :type cmp: a `bool` or a `callable`.
+
+ :param Optional[bool] hash: Include this attribute in the generated
+ ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This
+ is the correct behavior according the Python spec. Setting this value
+ to anything else than ``None`` is *discouraged*.
+ :param bool init: Include this attribute in the generated ``__init__``
+ method. It is possible to set this to ``False`` and set a default
+ value. In that case this attributed is unconditionally initialized
+ with the specified default value or factory.
+ :param callable converter: `callable` that is called by
+ ``attrs``-generated ``__init__`` methods to convert attribute's value
+ to the desired format. It is given the passed-in value, and the
+ returned value will be used as the new value of the attribute. The
+ value is converted before being passed to the validator, if any.
+ :param metadata: An arbitrary mapping, to be used by third-party
+ components. See `extending_metadata`.
+ :param type: The type of the attribute. In Python 3.6 or greater, the
+ preferred method to specify the type is using a variable annotation
+ (see `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_).
+ This argument is provided for backward compatibility.
+ Regardless of the approach used, the type will be stored on
+ ``Attribute.type``.
+
+ Please note that ``attrs`` doesn't do anything with this metadata by
+ itself. You can use it as part of your own code or for
+ `static type checking <types>`.
+ :param kw_only: Make this attribute keyword-only (Python 3+)
+ in the generated ``__init__`` (if ``init`` is ``False``, this
+ parameter is ignored).
+ :param on_setattr: Allows to overwrite the *on_setattr* setting from
+ `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used.
+ Set to `attr.setters.NO_OP` to run **no** `setattr` hooks for this
+ attribute -- regardless of the setting in `attr.s`.
+ :type on_setattr: `callable`, or a list of callables, or `None`, or
+ `attr.setters.NO_OP`
+
+ .. versionadded:: 15.2.0 *convert*
+ .. versionadded:: 16.3.0 *metadata*
+ .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
+ .. versionchanged:: 17.1.0
+ *hash* is ``None`` and therefore mirrors *eq* by default.
+ .. versionadded:: 17.3.0 *type*
+ .. deprecated:: 17.4.0 *convert*
+ .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated
+ *convert* to achieve consistency with other noun-based arguments.
+ .. versionadded:: 18.1.0
+ ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
+ .. versionadded:: 18.2.0 *kw_only*
+ .. versionchanged:: 19.2.0 *convert* keyword argument removed.
+ .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
+ .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+ .. versionadded:: 19.2.0 *eq* and *order*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
+ .. versionchanged:: 21.1.0
+ *eq*, *order*, and *cmp* also accept a custom callable
+ .. versionchanged:: 21.1.0 *cmp* undeprecated
+ """
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
+ cmp, eq, order, True
+ )
+
+ if hash is not None and hash is not True and hash is not False:
+ raise TypeError(
+ "Invalid value for hash. Must be True, False, or None."
+ )
+
+ if factory is not None:
+ if default is not NOTHING:
+ raise ValueError(
+ "The `default` and `factory` arguments are mutually "
+ "exclusive."
+ )
+ if not callable(factory):
+ raise ValueError("The `factory` argument must be a callable.")
+ default = Factory(factory)
+
+ if metadata is None:
+ metadata = {}
+
+ # Apply syntactic sugar by auto-wrapping.
+ if isinstance(on_setattr, (list, tuple)):
+ on_setattr = setters.pipe(*on_setattr)
+
+ if validator and isinstance(validator, (list, tuple)):
+ validator = and_(*validator)
+
+ if converter and isinstance(converter, (list, tuple)):
+ converter = pipe(*converter)
+
+ return _CountingAttr(
+ default=default,
+ validator=validator,
+ repr=repr,
+ cmp=None,
+ hash=hash,
+ init=init,
+ converter=converter,
+ metadata=metadata,
+ type=type,
+ kw_only=kw_only,
+ eq=eq,
+ eq_key=eq_key,
+ order=order,
+ order_key=order_key,
+ on_setattr=on_setattr,
+ )
+
+
+def _compile_and_eval(script, globs, locs=None, filename=""):
+ """
+ "Exec" the script with the given global (globs) and local (locs) variables.
+ """
+ bytecode = compile(script, filename, "exec")
+ eval(bytecode, globs, locs)
+
+
+def _make_method(name, script, filename, globs=None):
+ """
+ Create the method with the script given and return the method object.
+ """
+ locs = {}
+ if globs is None:
+ globs = {}
+
+ _compile_and_eval(script, globs, locs, filename)
+
+ # In order of debuggers like PDB being able to step through the code,
+ # we add a fake linecache entry.
+ linecache.cache[filename] = (
+ len(script),
+ None,
+ script.splitlines(True),
+ filename,
+ )
+
+ return locs[name]
+
+
+def _make_attr_tuple_class(cls_name, attr_names):
+ """
+ Create a tuple subclass to hold `Attribute`s for an `attrs` class.
+
+ The subclass is a bare tuple with properties for names.
+
+ class MyClassAttributes(tuple):
+ __slots__ = ()
+ x = property(itemgetter(0))
+ """
+ attr_class_name = "{}Attributes".format(cls_name)
+ attr_class_template = [
+ "class {}(tuple):".format(attr_class_name),
+ " __slots__ = ()",
+ ]
+ if attr_names:
+ for i, attr_name in enumerate(attr_names):
+ attr_class_template.append(
+ _tuple_property_pat.format(index=i, attr_name=attr_name)
+ )
+ else:
+ attr_class_template.append(" pass")
+ globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
+ _compile_and_eval("\n".join(attr_class_template), globs)
+ return globs[attr_class_name]
+
+
+# Tuple class for extracted attributes from a class definition.
+# `base_attrs` is a subset of `attrs`.
+_Attributes = _make_attr_tuple_class(
+ "_Attributes",
+ [
+ # all attributes to build dunder methods for
+ "attrs",
+ # attributes that have been inherited
+ "base_attrs",
+ # map inherited attributes to their originating classes
+ "base_attrs_map",
+ ],
+)
+
+
+def _is_class_var(annot):
+ """
+ Check whether *annot* is a typing.ClassVar.
+
+ The string comparison hack is used to avoid evaluating all string
+ annotations which would put attrs-based classes at a performance
+ disadvantage compared to plain old classes.
+ """
+ annot = str(annot)
+
+ # Annotation can be quoted.
+ if annot.startswith(("'", '"')) and annot.endswith(("'", '"')):
+ annot = annot[1:-1]
+
+ return annot.startswith(_classvar_prefixes)
+
+
+def _has_own_attribute(cls, attrib_name):
+ """
+ Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
+
+ Requires Python 3.
+ """
+ attr = getattr(cls, attrib_name, _sentinel)
+ if attr is _sentinel:
+ return False
+
+ for base_cls in cls.__mro__[1:]:
+ a = getattr(base_cls, attrib_name, None)
+ if attr is a:
+ return False
+
+ return True
+
+
+def _get_annotations(cls):
+ """
+ Get annotations for *cls*.
+ """
+ if _has_own_attribute(cls, "__annotations__"):
+ return cls.__annotations__
+
+ return {}
+
+
+def _counter_getter(e):
+ """
+ Key function for sorting to avoid re-creating a lambda for every class.
+ """
+ return e[1].counter
+
+
+def _collect_base_attrs(cls, taken_attr_names):
+ """
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+ """
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in reversed(cls.__mro__[1:-1]):
+ for a in getattr(base_cls, "__attrs_attrs__", []):
+ if a.inherited or a.name in taken_attr_names:
+ continue
+
+ a = a.evolve(inherited=True)
+ base_attrs.append(a)
+ base_attr_map[a.name] = base_cls
+
+ # For each name, only keep the freshest definition i.e. the furthest at the
+ # back. base_attr_map is fine because it gets overwritten with every new
+ # instance.
+ filtered = []
+ seen = set()
+ for a in reversed(base_attrs):
+ if a.name in seen:
+ continue
+ filtered.insert(0, a)
+ seen.add(a.name)
+
+ return filtered, base_attr_map
+
+
+def _collect_base_attrs_broken(cls, taken_attr_names):
+ """
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+
+ N.B. *taken_attr_names* will be mutated.
+
+ Adhere to the old incorrect behavior.
+
+ Notably it collects from the front and considers inherited attributes which
+ leads to the buggy behavior reported in #428.
+ """
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in cls.__mro__[1:-1]:
+ for a in getattr(base_cls, "__attrs_attrs__", []):
+ if a.name in taken_attr_names:
+ continue
+
+ a = a.evolve(inherited=True)
+ taken_attr_names.add(a.name)
+ base_attrs.append(a)
+ base_attr_map[a.name] = base_cls
+
+ return base_attrs, base_attr_map
+
+
+def _transform_attrs(
+ cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer
+):
+ """
+ Transform all `_CountingAttr`s on a class into `Attribute`s.
+
+ If *these* is passed, use that and don't look for them on the class.
+
+ *collect_by_mro* is True, collect them in the correct MRO order, otherwise
+ use the old -- incorrect -- order. See #428.
+
+ Return an `_Attributes`.
+ """
+ cd = cls.__dict__
+ anns = _get_annotations(cls)
+
+ if these is not None:
+ ca_list = [(name, ca) for name, ca in iteritems(these)]
+
+ if not isinstance(these, ordered_dict):
+ ca_list.sort(key=_counter_getter)
+ elif auto_attribs is True:
+ ca_names = {
+ name
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ }
+ ca_list = []
+ annot_names = set()
+ for attr_name, type in anns.items():
+ if _is_class_var(type):
+ continue
+ annot_names.add(attr_name)
+ a = cd.get(attr_name, NOTHING)
+
+ if not isinstance(a, _CountingAttr):
+ if a is NOTHING:
+ a = attrib()
+ else:
+ a = attrib(default=a)
+ ca_list.append((attr_name, a))
+
+ unannotated = ca_names - annot_names
+ if len(unannotated) > 0:
+ raise UnannotatedAttributeError(
+ "The following `attr.ib`s lack a type annotation: "
+ + ", ".join(
+ sorted(unannotated, key=lambda n: cd.get(n).counter)
+ )
+ + "."
+ )
+ else:
+ ca_list = sorted(
+ (
+ (name, attr)
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ ),
+ key=lambda e: e[1].counter,
+ )
+
+ own_attrs = [
+ Attribute.from_counting_attr(
+ name=attr_name, ca=ca, type=anns.get(attr_name)
+ )
+ for attr_name, ca in ca_list
+ ]
+
+ if collect_by_mro:
+ base_attrs, base_attr_map = _collect_base_attrs(
+ cls, {a.name for a in own_attrs}
+ )
+ else:
+ base_attrs, base_attr_map = _collect_base_attrs_broken(
+ cls, {a.name for a in own_attrs}
+ )
+
+ attr_names = [a.name for a in base_attrs + own_attrs]
+
+ AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
+
+ if kw_only:
+ own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
+ base_attrs = [a.evolve(kw_only=True) for a in base_attrs]
+
+ attrs = AttrsClass(base_attrs + own_attrs)
+
+ # Mandatory vs non-mandatory attr order only matters when they are part of
+ # the __init__ signature and when they aren't kw_only (which are moved to
+ # the end and can be mandatory or non-mandatory in any order, as they will
+ # be specified as keyword args anyway). Check the order of those attrs:
+ had_default = False
+ for a in (a for a in attrs if a.init is not False and a.kw_only is False):
+ if had_default is True and a.default is NOTHING:
+ raise ValueError(
+ "No mandatory attributes allowed after an attribute with a "
+ "default value or factory. Attribute in question: %r" % (a,)
+ )
+
+ if had_default is False and a.default is not NOTHING:
+ had_default = True
+
+ if field_transformer is not None:
+ attrs = field_transformer(cls, attrs)
+ return _Attributes((attrs, base_attrs, base_attr_map))
+
+
+if PYPY:
+
+ def _frozen_setattrs(self, name, value):
+ """
+ Attached to frozen classes as __setattr__.
+ """
+ if isinstance(self, BaseException) and name in (
+ "__cause__",
+ "__context__",
+ ):
+ BaseException.__setattr__(self, name, value)
+ return
+
+ raise FrozenInstanceError()
+
+
+else:
+
+ def _frozen_setattrs(self, name, value):
+ """
+ Attached to frozen classes as __setattr__.
+ """
+ raise FrozenInstanceError()
+
+
+def _frozen_delattrs(self, name):
+ """
+ Attached to frozen classes as __delattr__.
+ """
+ raise FrozenInstanceError()
+
+
+class _ClassBuilder(object):
+ """
+ Iteratively build *one* class.
+ """
+
+ __slots__ = (
+ "_attr_names",
+ "_attrs",
+ "_base_attr_map",
+ "_base_names",
+ "_cache_hash",
+ "_cls",
+ "_cls_dict",
+ "_delete_attribs",
+ "_frozen",
+ "_has_pre_init",
+ "_has_post_init",
+ "_is_exc",
+ "_on_setattr",
+ "_slots",
+ "_weakref_slot",
+ "_has_own_setattr",
+ "_has_custom_setattr",
+ )
+
+ def __init__(
+ self,
+ cls,
+ these,
+ slots,
+ frozen,
+ weakref_slot,
+ getstate_setstate,
+ auto_attribs,
+ kw_only,
+ cache_hash,
+ is_exc,
+ collect_by_mro,
+ on_setattr,
+ has_custom_setattr,
+ field_transformer,
+ ):
+ attrs, base_attrs, base_map = _transform_attrs(
+ cls,
+ these,
+ auto_attribs,
+ kw_only,
+ collect_by_mro,
+ field_transformer,
+ )
+
+ self._cls = cls
+ self._cls_dict = dict(cls.__dict__) if slots else {}
+ self._attrs = attrs
+ self._base_names = set(a.name for a in base_attrs)
+ self._base_attr_map = base_map
+ self._attr_names = tuple(a.name for a in attrs)
+ self._slots = slots
+ self._frozen = frozen
+ self._weakref_slot = weakref_slot
+ self._cache_hash = cache_hash
+ self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
+ self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
+ self._delete_attribs = not bool(these)
+ self._is_exc = is_exc
+ self._on_setattr = on_setattr
+
+ self._has_custom_setattr = has_custom_setattr
+ self._has_own_setattr = False
+
+ self._cls_dict["__attrs_attrs__"] = self._attrs
+
+ if frozen:
+ self._cls_dict["__setattr__"] = _frozen_setattrs
+ self._cls_dict["__delattr__"] = _frozen_delattrs
+
+ self._has_own_setattr = True
+
+ if getstate_setstate:
+ (
+ self._cls_dict["__getstate__"],
+ self._cls_dict["__setstate__"],
+ ) = self._make_getstate_setstate()
+
+ def __repr__(self):
+ return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__)
+
+ def build_class(self):
+ """
+ Finalize class based on the accumulated configuration.
+
+ Builder cannot be used after calling this method.
+ """
+ if self._slots is True:
+ return self._create_slots_class()
+ else:
+ return self._patch_original_class()
+
+ def _patch_original_class(self):
+ """
+ Apply accumulated methods and return the class.
+ """
+ cls = self._cls
+ base_names = self._base_names
+
+ # Clean class of attribute definitions (`attr.ib()`s).
+ if self._delete_attribs:
+ for name in self._attr_names:
+ if (
+ name not in base_names
+ and getattr(cls, name, _sentinel) is not _sentinel
+ ):
+ try:
+ delattr(cls, name)
+ except AttributeError:
+ # This can happen if a base class defines a class
+ # variable and we want to set an attribute with the
+ # same name by using only a type annotation.
+ pass
+
+ # Attach our dunder methods.
+ for name, value in self._cls_dict.items():
+ setattr(cls, name, value)
+
+ # If we've inherited an attrs __setattr__ and don't write our own,
+ # reset it to object's.
+ if not self._has_own_setattr and getattr(
+ cls, "__attrs_own_setattr__", False
+ ):
+ cls.__attrs_own_setattr__ = False
+
+ if not self._has_custom_setattr:
+ cls.__setattr__ = object.__setattr__
+
+ return cls
+
+ def _create_slots_class(self):
+ """
+ Build and return a new class with a `__slots__` attribute.
+ """
+ cd = {
+ k: v
+ for k, v in iteritems(self._cls_dict)
+ if k not in tuple(self._attr_names) + ("__dict__", "__weakref__")
+ }
+
+ # If our class doesn't have its own implementation of __setattr__
+ # (either from the user or by us), check the bases, if one of them has
+ # an attrs-made __setattr__, that needs to be reset. We don't walk the
+ # MRO because we only care about our immediate base classes.
+ # XXX: This can be confused by subclassing a slotted attrs class with
+ # XXX: a non-attrs class and subclass the resulting class with an attrs
+ # XXX: class. See `test_slotted_confused` for details. For now that's
+ # XXX: OK with us.
+ if not self._has_own_setattr:
+ cd["__attrs_own_setattr__"] = False
+
+ if not self._has_custom_setattr:
+ for base_cls in self._cls.__bases__:
+ if base_cls.__dict__.get("__attrs_own_setattr__", False):
+ cd["__setattr__"] = object.__setattr__
+ break
+
+ # Traverse the MRO to collect existing slots
+ # and check for an existing __weakref__.
+ existing_slots = dict()
+ weakref_inherited = False
+ for base_cls in self._cls.__mro__[1:-1]:
+ if base_cls.__dict__.get("__weakref__", None) is not None:
+ weakref_inherited = True
+ existing_slots.update(
+ {
+ name: getattr(base_cls, name)
+ for name in getattr(base_cls, "__slots__", [])
+ }
+ )
+
+ base_names = set(self._base_names)
+
+ names = self._attr_names
+ if (
+ self._weakref_slot
+ and "__weakref__" not in getattr(self._cls, "__slots__", ())
+ and "__weakref__" not in names
+ and not weakref_inherited
+ ):
+ names += ("__weakref__",)
+
+ # We only add the names of attributes that aren't inherited.
+ # Setting __slots__ to inherited attributes wastes memory.
+ slot_names = [name for name in names if name not in base_names]
+ # There are slots for attributes from current class
+ # that are defined in parent classes.
+ # As their descriptors may be overriden by a child class,
+ # we collect them here and update the class dict
+ reused_slots = {
+ slot: slot_descriptor
+ for slot, slot_descriptor in iteritems(existing_slots)
+ if slot in slot_names
+ }
+ slot_names = [name for name in slot_names if name not in reused_slots]
+ cd.update(reused_slots)
+ if self._cache_hash:
+ slot_names.append(_hash_cache_field)
+ cd["__slots__"] = tuple(slot_names)
+
+ qualname = getattr(self._cls, "__qualname__", None)
+ if qualname is not None:
+ cd["__qualname__"] = qualname
+
+ # Create new class based on old class and our methods.
+ cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
+
+ # The following is a fix for
+ # https://github.com/python-attrs/attrs/issues/102. On Python 3,
+ # if a method mentions `__class__` or uses the no-arg super(), the
+ # compiler will bake a reference to the class in the method itself
+ # as `method.__closure__`. Since we replace the class with a
+ # clone, we rewrite these references so it keeps working.
+ for item in cls.__dict__.values():
+ if isinstance(item, (classmethod, staticmethod)):
+ # Class- and staticmethods hide their functions inside.
+ # These might need to be rewritten as well.
+ closure_cells = getattr(item.__func__, "__closure__", None)
+ elif isinstance(item, property):
+ # Workaround for property `super()` shortcut (PY3-only).
+ # There is no universal way for other descriptors.
+ closure_cells = getattr(item.fget, "__closure__", None)
+ else:
+ closure_cells = getattr(item, "__closure__", None)
+
+ if not closure_cells: # Catch None or the empty list.
+ continue
+ for cell in closure_cells:
+ try:
+ match = cell.cell_contents is self._cls
+ except ValueError: # ValueError: Cell is empty
+ pass
+ else:
+ if match:
+ set_closure_cell(cell, cls)
+
+ return cls
+
+ def add_repr(self, ns):
+ self._cls_dict["__repr__"] = self._add_method_dunders(
+ _make_repr(self._attrs, ns=ns)
+ )
+ return self
+
+ def add_str(self):
+ repr = self._cls_dict.get("__repr__")
+ if repr is None:
+ raise ValueError(
+ "__str__ can only be generated if a __repr__ exists."
+ )
+
+ def __str__(self):
+ return self.__repr__()
+
+ self._cls_dict["__str__"] = self._add_method_dunders(__str__)
+ return self
+
+ def _make_getstate_setstate(self):
+ """
+ Create custom __setstate__ and __getstate__ methods.
+ """
+ # __weakref__ is not writable.
+ state_attr_names = tuple(
+ an for an in self._attr_names if an != "__weakref__"
+ )
+
+ def slots_getstate(self):
+ """
+ Automatically created by attrs.
+ """
+ return tuple(getattr(self, name) for name in state_attr_names)
+
+ hash_caching_enabled = self._cache_hash
+
+ def slots_setstate(self, state):
+ """
+ Automatically created by attrs.
+ """
+ __bound_setattr = _obj_setattr.__get__(self, Attribute)
+ for name, value in zip(state_attr_names, state):
+ __bound_setattr(name, value)
+
+ # The hash code cache is not included when the object is
+ # serialized, but it still needs to be initialized to None to
+ # indicate that the first call to __hash__ should be a cache
+ # miss.
+ if hash_caching_enabled:
+ __bound_setattr(_hash_cache_field, None)
+
+ return slots_getstate, slots_setstate
+
+ def make_unhashable(self):
+ self._cls_dict["__hash__"] = None
+ return self
+
+ def add_hash(self):
+ self._cls_dict["__hash__"] = self._add_method_dunders(
+ _make_hash(
+ self._cls,
+ self._attrs,
+ frozen=self._frozen,
+ cache_hash=self._cache_hash,
+ )
+ )
+
+ return self
+
+ def add_init(self):
+ self._cls_dict["__init__"] = self._add_method_dunders(
+ _make_init(
+ self._cls,
+ self._attrs,
+ self._has_pre_init,
+ self._has_post_init,
+ self._frozen,
+ self._slots,
+ self._cache_hash,
+ self._base_attr_map,
+ self._is_exc,
+ self._on_setattr is not None
+ and self._on_setattr is not setters.NO_OP,
+ attrs_init=False,
+ )
+ )
+
+ return self
+
+ def add_attrs_init(self):
+ self._cls_dict["__attrs_init__"] = self._add_method_dunders(
+ _make_init(
+ self._cls,
+ self._attrs,
+ self._has_pre_init,
+ self._has_post_init,
+ self._frozen,
+ self._slots,
+ self._cache_hash,
+ self._base_attr_map,
+ self._is_exc,
+ self._on_setattr is not None
+ and self._on_setattr is not setters.NO_OP,
+ attrs_init=True,
+ )
+ )
+
+ return self
+
+ def add_eq(self):
+ cd = self._cls_dict
+
+ cd["__eq__"] = self._add_method_dunders(
+ _make_eq(self._cls, self._attrs)
+ )
+ cd["__ne__"] = self._add_method_dunders(_make_ne())
+
+ return self
+
+ def add_order(self):
+ cd = self._cls_dict
+
+ cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
+ self._add_method_dunders(meth)
+ for meth in _make_order(self._cls, self._attrs)
+ )
+
+ return self
+
+ def add_setattr(self):
+ if self._frozen:
+ return self
+
+ sa_attrs = {}
+ for a in self._attrs:
+ on_setattr = a.on_setattr or self._on_setattr
+ if on_setattr and on_setattr is not setters.NO_OP:
+ sa_attrs[a.name] = a, on_setattr
+
+ if not sa_attrs:
+ return self
+
+ if self._has_custom_setattr:
+ # We need to write a __setattr__ but there already is one!
+ raise ValueError(
+ "Can't combine custom __setattr__ with on_setattr hooks."
+ )
+
+ # docstring comes from _add_method_dunders
+ def __setattr__(self, name, val):
+ try:
+ a, hook = sa_attrs[name]
+ except KeyError:
+ nval = val
+ else:
+ nval = hook(self, a, val)
+
+ _obj_setattr(self, name, nval)
+
+ self._cls_dict["__attrs_own_setattr__"] = True
+ self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
+ self._has_own_setattr = True
+
+ return self
+
+ def _add_method_dunders(self, method):
+ """
+ Add __module__ and __qualname__ to a *method* if possible.
+ """
+ try:
+ method.__module__ = self._cls.__module__
+ except AttributeError:
+ pass
+
+ try:
+ method.__qualname__ = ".".join(
+ (self._cls.__qualname__, method.__name__)
+ )
+ except AttributeError:
+ pass
+
+ try:
+ method.__doc__ = "Method generated by attrs for class %s." % (
+ self._cls.__qualname__,
+ )
+ except AttributeError:
+ pass
+
+ return method
+
+
+_CMP_DEPRECATION = (
+ "The usage of `cmp` is deprecated and will be removed on or after "
+ "2021-06-01. Please use `eq` and `order` instead."
+)
+
+
+def _determine_attrs_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+ raise ValueError("Don't mix `cmp` with `eq' and `order`.")
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ return cmp, cmp
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq = default_eq
+
+ if order is None:
+ order = eq
+
+ if eq is False and order is True:
+ raise ValueError("`order` can only be True if `eq` is True too.")
+
+ return eq, order
+
+
+def _determine_attrib_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+ raise ValueError("Don't mix `cmp` with `eq' and `order`.")
+
+ def decide_callable_or_boolean(value):
+ """
+ Decide whether a key function is used.
+ """
+ if callable(value):
+ value, key = True, value
+ else:
+ key = None
+ return value, key
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ cmp, cmp_key = decide_callable_or_boolean(cmp)
+ return cmp, cmp_key, cmp, cmp_key
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq, eq_key = default_eq, None
+ else:
+ eq, eq_key = decide_callable_or_boolean(eq)
+
+ if order is None:
+ order, order_key = eq, eq_key
+ else:
+ order, order_key = decide_callable_or_boolean(order)
+
+ if eq is False and order is True:
+ raise ValueError("`order` can only be True if `eq` is True too.")
+
+ return eq, eq_key, order, order_key
+
+
+def _determine_whether_to_implement(
+ cls, flag, auto_detect, dunders, default=True
+):
+ """
+ Check whether we should implement a set of methods for *cls*.
+
+ *flag* is the argument passed into @attr.s like 'init', *auto_detect* the
+ same as passed into @attr.s and *dunders* is a tuple of attribute names
+ whose presence signal that the user has implemented it themselves.
+
+ Return *default* if no reason for either for or against is found.
+
+ auto_detect must be False on Python 2.
+ """
+ if flag is True or flag is False:
+ return flag
+
+ if flag is None and auto_detect is False:
+ return default
+
+ # Logically, flag is None and auto_detect is True here.
+ for dunder in dunders:
+ if _has_own_attribute(cls, dunder):
+ return False
+
+ return default
+
+
def attrs(
    maybe_cls=None,
    these=None,
    repr_ns=None,
    repr=None,
    cmp=None,
    hash=None,
    init=None,
    slots=False,
    frozen=False,
    weakref_slot=True,
    str=False,
    auto_attribs=False,
    kw_only=False,
    cache_hash=False,
    auto_exc=False,
    eq=None,
    order=None,
    auto_detect=False,
    collect_by_mro=False,
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
):
    r"""
    A class decorator that adds `dunder
    <https://wiki.python.org/moin/DunderAlias>`_\ -methods according to the
    specified attributes using `attr.ib` or the *these* argument.

    :param these: A dictionary of name to `attr.ib` mappings. This is
        useful to avoid the definition of your attributes within the class body
        because you can't (e.g. if you want to add ``__repr__`` methods to
        Django models) or don't want to.

        If *these* is not ``None``, ``attrs`` will *not* search the class body
        for attributes and will *not* remove any attributes from it.

        If *these* is an ordered dict (`dict` on Python 3.6+,
        `collections.OrderedDict` otherwise), the order is deduced from
        the order of the attributes inside *these*. Otherwise the order
        of the definition of the attributes is used.

    :type these: `dict` of `str` to `attr.ib`

    :param str repr_ns: When using nested classes, there's no way in Python 2
        to automatically detect that. Therefore it's possible to set the
        namespace explicitly for a more meaningful ``repr`` output.
    :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*,
        *order*, and *hash* arguments explicitly, assume they are set to
        ``True`` **unless any** of the involved methods for one of the
        arguments is implemented in the *current* class (i.e. it is *not*
        inherited from some base class).

        So for example by implementing ``__eq__`` on a class yourself,
        ``attrs`` will deduce ``eq=False`` and will create *neither*
        ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible
        ``__ne__`` by default, so it *should* be enough to only implement
        ``__eq__`` in most cases).

        .. warning::

           If you prevent ``attrs`` from creating the ordering methods for you
           (``order=False``, e.g. by implementing ``__le__``), it becomes
           *your* responsibility to make sure its ordering is sound. The best
           way is to use the `functools.total_ordering` decorator.


        Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*,
        *cmp*, or *hash* overrides whatever *auto_detect* would determine.

        *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises
        a `PythonTooOldError`.

    :param bool repr: Create a ``__repr__`` method with a human readable
        representation of ``attrs`` attributes..
    :param bool str: Create a ``__str__`` method that is identical to
        ``__repr__``. This is usually not necessary except for
        `Exception`\ s.
    :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__``
        and ``__ne__`` methods that check two instances for equality.

        They compare the instances as if they were tuples of their ``attrs``
        attributes if and only if the types of both classes are *identical*!
    :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``,
        ``__gt__``, and ``__ge__`` methods that behave like *eq* above and
        allow instances to be ordered. If ``None`` (default) mirror value of
        *eq*.
    :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq*
        and *order* to the same value. Must not be mixed with *eq* or *order*.
    :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method
        is generated according how *eq* and *frozen* are set.

        1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you.
        2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to
           None, marking it unhashable (which it is).
        3. If *eq* is False, ``__hash__`` will be left untouched meaning the
           ``__hash__`` method of the base class will be used (if base class is
           ``object``, this means it will fall back to id-based hashing.).

        Although not recommended, you can decide for yourself and force
        ``attrs`` to create one (e.g. if the class is immutable even though you
        didn't freeze it programmatically) by passing ``True`` or not. Both of
        these cases are rather special and should be used carefully.

        See our documentation on `hashing`, Python's documentation on
        `object.__hash__`, and the `GitHub issue that led to the default \
        behavior <https://github.com/python-attrs/attrs/issues/136>`_ for more
        details.
    :param bool init: Create a ``__init__`` method that initializes the
        ``attrs`` attributes. Leading underscores are stripped for the argument
        name. If a ``__attrs_pre_init__`` method exists on the class, it will
        be called before the class is initialized. If a ``__attrs_post_init__``
        method exists on the class, it will be called after the class is fully
        initialized.

        If ``init`` is ``False``, an ``__attrs_init__`` method will be
        injected instead. This allows you to define a custom ``__init__``
        method that can do pre-init work such as ``super().__init__()``,
        and then call ``__attrs_init__()`` and ``__attrs_post_init__()``.
    :param bool slots: Create a `slotted class <slotted classes>` that's more
        memory-efficient. Slotted classes are generally superior to the default
        dict classes, but have some gotchas you should know about, so we
        encourage you to read the `glossary entry <slotted classes>`.
    :param bool frozen: Make instances immutable after initialization. If
        someone attempts to modify a frozen instance,
        `attr.exceptions.FrozenInstanceError` is raised.

        .. note::

            1. This is achieved by installing a custom ``__setattr__`` method
               on your class, so you can't implement your own.

            2. True immutability is impossible in Python.

            3. This *does* have a minor a runtime performance `impact
               <how-frozen>` when initializing new instances. In other words:
               ``__init__`` is slightly slower with ``frozen=True``.

            4. If a class is frozen, you cannot modify ``self`` in
               ``__attrs_post_init__`` or a self-written ``__init__``. You can
               circumvent that limitation by using
               ``object.__setattr__(self, "attribute_name", value)``.

            5. Subclasses of a frozen class are frozen too.

    :param bool weakref_slot: Make instances weak-referenceable. This has no
        effect unless ``slots`` is also enabled.
    :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated
        attributes (Python 3.6 and later only) from the class body.

        In this case, you **must** annotate every field. If ``attrs``
        encounters a field that is set to an `attr.ib` but lacks a type
        annotation, an `attr.exceptions.UnannotatedAttributeError` is
        raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't
        want to set a type.

        If you assign a value to those attributes (e.g. ``x: int = 42``), that
        value becomes the default value like if it were passed using
        ``attr.ib(default=42)``. Passing an instance of `Factory` also
        works as expected in most cases (see warning below).

        Attributes annotated as `typing.ClassVar`, and attributes that are
        neither annotated nor set to an `attr.ib` are **ignored**.

        .. warning::
           For features that use the attribute name to create decorators (e.g.
           `validators <validators>`), you still *must* assign `attr.ib` to
           them. Otherwise Python will either not find the name or try to use
           the default value to call e.g. ``validator`` on it.

           These errors can be quite confusing and probably the most common bug
           report on our bug tracker.

        .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/
    :param bool kw_only: Make all attributes keyword-only (Python 3+)
        in the generated ``__init__`` (if ``init`` is ``False``, this
        parameter is ignored).
    :param bool cache_hash: Ensure that the object's hash code is computed
        only once and stored on the object. If this is set to ``True``,
        hashing must be either explicitly or implicitly enabled for this
        class. If the hash code is cached, avoid any reassignments of
        fields involved in hash code computation or mutations of the objects
        those fields point to after object creation. If such changes occur,
        the behavior of the object's hash code is undefined.
    :param bool auto_exc: If the class subclasses `BaseException`
        (which implicitly includes any subclass of any exception), the
        following happens to behave like a well-behaved Python exceptions
        class:

        - the values for *eq*, *order*, and *hash* are ignored and the
          instances compare and hash by the instance's ids (N.B. ``attrs`` will
          *not* remove existing implementations of ``__hash__`` or the equality
          methods. It just won't add own ones.),
        - all attributes that are either passed into ``__init__`` or have a
          default value are additionally available as a tuple in the ``args``
          attribute,
        - the value of *str* is ignored leaving ``__str__`` to base classes.
    :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs``
       collects attributes from base classes. The default behavior is
       incorrect in certain cases of multiple inheritance. It should be on by
       default but is kept off for backward-compatability.

       See issue `#428 <https://github.com/python-attrs/attrs/issues/428>`_ for
       more details.

    :param Optional[bool] getstate_setstate:
       .. note::
          This is usually only interesting for slotted classes and you should
          probably just set *auto_detect* to `True`.

       If `True`, ``__getstate__`` and
       ``__setstate__`` are generated and attached to the class. This is
       necessary for slotted classes to be pickleable. If left `None`, it's
       `True` by default for slotted classes and ``False`` for dict classes.

       If *auto_detect* is `True`, and *getstate_setstate* is left `None`,
       and **either** ``__getstate__`` or ``__setstate__`` is detected directly
       on the class (i.e. not inherited), it is set to `False` (this is usually
       what you want).

    :param on_setattr: A callable that is run whenever the user attempts to set
        an attribute (either by assignment like ``i.x = 42`` or by using
        `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments
        as validators: the instance, the attribute that is being modified, and
        the new value.

        If no exception is raised, the attribute is set to the return value of
        the callable.

        If a list of callables is passed, they're automatically wrapped in an
        `attr.setters.pipe`.

    :param Optional[callable] field_transformer:
        A function that is called with the original class object and all
        fields right before ``attrs`` finalizes the class. You can use
        this, e.g., to automatically add converters or validators to
        fields based on their types. See `transform-fields` for more details.

    .. versionadded:: 16.0.0 *slots*
    .. versionadded:: 16.1.0 *frozen*
    .. versionadded:: 16.3.0 *str*
    .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
    .. versionchanged:: 17.1.0
       *hash* supports ``None`` as value which is also the default now.
    .. versionadded:: 17.3.0 *auto_attribs*
    .. versionchanged:: 18.1.0
       If *these* is passed, no attributes are deleted from the class body.
    .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
    .. versionadded:: 18.2.0 *weakref_slot*
    .. deprecated:: 18.2.0
       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
       `DeprecationWarning` if the classes compared are subclasses of
       each other. ``__eq`` and ``__ne__`` never tried to compared subclasses
       to each other.
    .. versionchanged:: 19.2.0
       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
       subclasses comparable anymore.
    .. versionadded:: 18.2.0 *kw_only*
    .. versionadded:: 18.2.0 *cache_hash*
    .. versionadded:: 19.1.0 *auto_exc*
    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
    .. versionadded:: 19.2.0 *eq* and *order*
    .. versionadded:: 20.1.0 *auto_detect*
    .. versionadded:: 20.1.0 *collect_by_mro*
    .. versionadded:: 20.1.0 *getstate_setstate*
    .. versionadded:: 20.1.0 *on_setattr*
    .. versionadded:: 20.3.0 *field_transformer*
    .. versionchanged:: 21.1.0
       ``init=False`` injects ``__attrs_init__``
    .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__``
    .. versionchanged:: 21.1.0 *cmp* undeprecated
    """
    if auto_detect and PY2:
        raise PythonTooOldError(
            "auto_detect only works on Python 3 and later."
        )

    # Fold the legacy *cmp* argument together with *eq*/*order*; both stay
    # None here if the caller left everything unset.
    eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)
    hash_ = hash  # work around the lack of nonlocal

    if isinstance(on_setattr, (list, tuple)):
        on_setattr = setters.pipe(*on_setattr)

    def wrap(cls):

        if getattr(cls, "__class__", None) is None:
            raise TypeError("attrs only works with new-style classes.")

        is_frozen = frozen or _has_frozen_base_class(cls)
        is_exc = auto_exc is True and issubclass(cls, BaseException)
        has_own_setattr = auto_detect and _has_own_attribute(
            cls, "__setattr__"
        )

        if has_own_setattr and is_frozen:
            raise ValueError("Can't freeze a class with a custom __setattr__.")

        # The builder gathers the attribute definitions and adds the
        # requested dunder methods one by one before producing the final
        # (possibly slotted) class.
        builder = _ClassBuilder(
            cls,
            these,
            slots,
            is_frozen,
            weakref_slot,
            _determine_whether_to_implement(
                cls,
                getstate_setstate,
                auto_detect,
                ("__getstate__", "__setstate__"),
                default=slots,
            ),
            auto_attribs,
            kw_only,
            cache_hash,
            is_exc,
            collect_by_mro,
            on_setattr,
            has_own_setattr,
            field_transformer,
        )
        if _determine_whether_to_implement(
            cls, repr, auto_detect, ("__repr__",)
        ):
            builder.add_repr(repr_ns)
        if str is True:
            builder.add_str()

        eq = _determine_whether_to_implement(
            cls, eq_, auto_detect, ("__eq__", "__ne__")
        )
        # Exception classes keep identity-based comparison, so eq/order are
        # skipped for them.
        if not is_exc and eq is True:
            builder.add_eq()
        if not is_exc and _determine_whether_to_implement(
            cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__")
        ):
            builder.add_order()

        builder.add_setattr()

        # Auto-detection only disables hash generation when *hash* was left
        # None and the class body defines its own __hash__.
        if (
            hash_ is None
            and auto_detect is True
            and _has_own_attribute(cls, "__hash__")
        ):
            hash = False
        else:
            hash = hash_
        if hash is not True and hash is not False and hash is not None:
            # Can't use `hash in` because 1 == True for example.
            raise TypeError(
                "Invalid value for hash.  Must be True, False, or None."
            )
        elif hash is False or (hash is None and eq is False) or is_exc:
            # Don't do anything. Should fall back to __object__'s __hash__
            # which is by id.
            if cache_hash:
                raise TypeError(
                    "Invalid value for cache_hash.  To use hash caching,"
                    " hashing must be either explicitly or implicitly "
                    "enabled."
                )
        elif hash is True or (
            hash is None and eq is True and is_frozen is True
        ):
            # Build a __hash__ if told so, or if it's safe.
            builder.add_hash()
        else:
            # Raise TypeError on attempts to hash.
            if cache_hash:
                raise TypeError(
                    "Invalid value for cache_hash.  To use hash caching,"
                    " hashing must be either explicitly or implicitly "
                    "enabled."
                )
            builder.make_unhashable()

        if _determine_whether_to_implement(
            cls, init, auto_detect, ("__init__",)
        ):
            builder.add_init()
        else:
            # Even without __init__, provide __attrs_init__ so a custom
            # __init__ can delegate the attribute setup.
            builder.add_attrs_init()
            if cache_hash:
                raise TypeError(
                    "Invalid value for cache_hash.  To use hash caching,"
                    " init must be True."
                )

        return builder.build_class()

    # maybe_cls's type depends on the usage of the decorator. It's a class
    # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
    if maybe_cls is None:
        return wrap
    else:
        return wrap(maybe_cls)
+
+
# Keep a module-private handle on the decorator: several helpers below take
# a parameter named *attrs*, which would otherwise shadow the function.
_attrs = attrs
"""
Internal alias so we can use it in functions that take an argument called
*attrs*.
"""
+
+
if PY2:

    def _has_frozen_base_class(cls):
        """
        Check whether *cls* has a frozen ancestor by looking at its
        __setattr__.
        """
        # On Python 2 the inherited __setattr__ is wrapped as an unbound
        # method, so compare by module and function name instead of by
        # object identity.
        return (
            getattr(cls.__setattr__, "__module__", None)
            == _frozen_setattrs.__module__
            and cls.__setattr__.__name__ == _frozen_setattrs.__name__
        )


else:

    def _has_frozen_base_class(cls):
        """
        Check whether *cls* has a frozen ancestor by looking at its
        __setattr__.
        """
        # On Python 3 a direct comparison against the frozen-setattr
        # sentinel function is sufficient.
        return cls.__setattr__ == _frozen_setattrs
+
+
+def _generate_unique_filename(cls, func_name):
+ """
+ Create a "filename" suitable for a function being generated.
+ """
+ unique_id = uuid.uuid4()
+ extra = ""
+ count = 1
+
+ while True:
+ unique_filename = "<attrs generated {0} {1}.{2}{3}>".format(
+ func_name,
+ cls.__module__,
+ getattr(cls, "__qualname__", cls.__name__),
+ extra,
+ )
+ # To handle concurrency we essentially "reserve" our spot in
+ # the linecache with a dummy line. The caller can then
+ # set this value correctly.
+ cache_line = (1, None, (str(unique_id),), unique_filename)
+ if (
+ linecache.cache.setdefault(unique_filename, cache_line)
+ == cache_line
+ ):
+ return unique_filename
+
+ # Looks like this spot is taken. Try again.
+ count += 1
+ extra = "-{0}".format(count)
+
+
def _make_hash(cls, attrs, frozen, cache_hash):
    """
    Build and compile a ``__hash__`` method for *cls*.

    Only attributes with ``hash=True`` (or ``hash=None`` combined with
    ``eq=True``) take part.  With *cache_hash*, the computed value is
    stored on the instance — via ``object.__setattr__`` when *frozen*.
    """
    attrs = tuple(
        a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
    )

    # Indentation unit for the generated source.
    tab = "        "

    unique_filename = _generate_unique_filename(cls, "hash")
    # A constant derived from the unique filename; it is folded into the
    # hashed tuple below as its first element.
    type_hash = hash(unique_filename)

    hash_def = "def __hash__(self"
    hash_func = "hash(("
    closing_braces = "))"
    if not cache_hash:
        hash_def += "):"
    else:
        if not PY2:
            # Keep the private _cache_wrapper argument keyword-only on
            # Python 3.
            hash_def += ", *"

        # Default-argument trick: resolve the wrapper class once at
        # definition time instead of on every call.
        hash_def += (
            ", _cache_wrapper="
            + "__import__('attr._make')._make._CacheHashWrapper):"
        )
        hash_func = "_cache_wrapper(" + hash_func
        closing_braces += ")"

    method_lines = [hash_def]

    def append_hash_computation_lines(prefix, indent):
        """
        Generate the code for actually computing the hash code.
        Below this will either be returned directly or used to compute
        a value which is then cached, depending on the value of cache_hash
        """

        method_lines.extend(
            [
                indent + prefix + hash_func,
                indent + "    %d," % (type_hash,),
            ]
        )

        for a in attrs:
            method_lines.append(indent + "    self.%s," % a.name)

        method_lines.append(indent + "    " + closing_braces)

    if cache_hash:
        # Compute lazily: only when the cache field is still None.
        method_lines.append(tab + "if self.%s is None:" % _hash_cache_field)
        if frozen:
            append_hash_computation_lines(
                "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2
            )
            method_lines.append(tab * 2 + ")")  # close __setattr__
        else:
            append_hash_computation_lines(
                "self.%s = " % _hash_cache_field, tab * 2
            )
        method_lines.append(tab + "return self.%s" % _hash_cache_field)
    else:
        append_hash_computation_lines("return ", tab)

    script = "\n".join(method_lines)
    return _make_method("__hash__", script, unique_filename)
+
+
def _add_hash(cls, attrs):
    """
    Generate a ``__hash__`` method (uncached, non-frozen variant) and
    install it on *cls*.
    """
    hash_method = _make_hash(cls, attrs, frozen=False, cache_hash=False)
    cls.__hash__ = hash_method
    return cls
+
+
+def _make_ne():
+ """
+ Create __ne__ method.
+ """
+
+ def __ne__(self, other):
+ """
+ Check equality and either forward a NotImplemented or
+ return the result negated.
+ """
+ result = self.__eq__(other)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return not result
+
+ return __ne__
+
+
def _make_eq(cls, attrs):
    """
    Create __eq__ method for *cls* with *attrs*.
    """
    # Only attributes with eq enabled take part in the comparison.
    attrs = [a for a in attrs if a.eq]

    unique_filename = _generate_unique_filename(cls, "eq")
    lines = [
        "def __eq__(self, other):",
        "    if other.__class__ is not self.__class__:",
        "        return NotImplemented",
    ]

    # We can't just do a big self.x = other.x and... clause due to
    # irregularities like nan == nan is false but (nan,) == (nan,) is true.
    globs = {}
    if attrs:
        lines.append("    return (")
        others = ["    ) == ("]
        for a in attrs:
            if a.eq_key:
                # With an eq_key, both sides are compared through the key
                # function: key(self.x) == key(other.x).
                cmp_name = "_%s_key" % (a.name,)
                # Add the key function to the global namespace
                # of the evaluated function.
                globs[cmp_name] = a.eq_key
                lines.append(
                    "        %s(self.%s),"
                    % (
                        cmp_name,
                        a.name,
                    )
                )
                others.append(
                    "        %s(other.%s),"
                    % (
                        cmp_name,
                        a.name,
                    )
                )
            else:
                lines.append("        self.%s," % (a.name,))
                others.append("        other.%s," % (a.name,))

        lines += others + ["    )"]
    else:
        # No comparable attributes: all instances of the class are equal.
        lines.append("    return True")

    script = "\n".join(lines)

    return _make_method("__eq__", script, unique_filename, globs)
+
+
+def _make_order(cls, attrs):
+ """
+ Create ordering methods for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.order]
+
+ def attrs_to_tuple(obj):
+ """
+ Save us some typing.
+ """
+ return tuple(
+ key(value) if key else value
+ for value, key in (
+ (getattr(obj, a.name), a.order_key) for a in attrs
+ )
+ )
+
+ def __lt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) < attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __le__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) <= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __gt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) > attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __ge__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) >= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ return __lt__, __le__, __gt__, __ge__
+
+
def _add_eq(cls, attrs=None):
    """
    Generate ``__eq__``/``__ne__`` and install them on *cls*.
    """
    # Default to the attributes attrs recorded on the class itself.
    attr_list = cls.__attrs_attrs__ if attrs is None else attrs

    cls.__eq__ = _make_eq(cls, attr_list)
    cls.__ne__ = _make_ne()

    return cls
+
+
# Thread-local re-entrancy guard shared by all generated __repr__ methods
# (see _make_repr): holds the ids of the instances currently being repr'd.
_already_repring = threading.local()
+
+
def _make_repr(attrs, ns):
    """
    Make a repr method that includes relevant *attrs*, adding *ns* to the full
    name.
    """

    # Pair each included attribute name with its formatter: the builtin
    # repr unless the user supplied a callable via a.repr.
    names_and_formatters = tuple(
        (a.name, repr if a.repr is True else a.repr)
        for a in attrs
        if a.repr is not False
    )

    def __repr__(self):
        """
        Automatically created by attrs.
        """
        try:
            working_set = _already_repring.working_set
        except AttributeError:
            working_set = set()
            _already_repring.working_set = working_set

        # Guard against infinite recursion through reference cycles.
        if id(self) in working_set:
            return "..."

        real_cls = self.__class__
        if ns is not None:
            class_name = ns + "." + real_cls.__name__
        else:
            qualname = getattr(real_cls, "__qualname__", None)
            if qualname is None:
                class_name = real_cls.__name__
            else:
                class_name = qualname.rsplit(">.", 1)[-1]

        # Since 'self' remains on the stack (i.e.: strongly referenced) for the
        # duration of this call, it's safe to depend on id(...) stability, and
        # not need to track the instance and therefore worry about properties
        # like weakref- or hash-ability.
        working_set.add(id(self))
        try:
            pieces = [class_name, "("]
            need_separator = False
            for name, formatter in names_and_formatters:
                if need_separator:
                    pieces.append(", ")
                need_separator = True
                pieces.append(name)
                pieces.append("=")
                pieces.append(formatter(getattr(self, name, NOTHING)))
            return "".join(pieces) + ")"
        finally:
            working_set.remove(id(self))

    return __repr__
+
+
def _add_repr(cls, ns=None, attrs=None):
    """
    Generate a ``__repr__`` method and install it on *cls*.
    """
    attr_list = cls.__attrs_attrs__ if attrs is None else attrs
    cls.__repr__ = _make_repr(attr_list, ns)
    return cls
+
+
def fields(cls):
    """
    Return the tuple of ``attrs`` attributes for a class.

    Besides positional indexing, the returned tuple supports lookup of
    fields by their names.

    :param type cls: Class to introspect.

    :raise TypeError: If *cls* is not a class.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    :rtype: tuple (with name accessors) of `attr.Attribute`

    .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
       by name.
    """
    if not isclass(cls):
        raise TypeError("Passed object must be a class.")

    field_tuple = getattr(cls, "__attrs_attrs__", None)
    if field_tuple is None:
        raise NotAnAttrsClassError(
            "{cls!r} is not an attrs-decorated class.".format(cls=cls)
        )

    return field_tuple
+
+
def fields_dict(cls):
    """
    Return an ordered dictionary of ``attrs`` attributes for a class, whose
    keys are the attribute names.

    :param type cls: Class to introspect.

    :raise TypeError: If *cls* is not a class.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    :rtype: an ordered dict where keys are attribute names and values are
        `attr.Attribute`\\ s. This will be a `dict` if it's
        naturally ordered like on Python 3.6+ or an
        :class:`~collections.OrderedDict` otherwise.

    .. versionadded:: 18.1.0
    """
    if not isclass(cls):
        raise TypeError("Passed object must be a class.")

    field_tuple = getattr(cls, "__attrs_attrs__", None)
    if field_tuple is None:
        raise NotAnAttrsClassError(
            "{cls!r} is not an attrs-decorated class.".format(cls=cls)
        )

    return ordered_dict((a.name, a) for a in field_tuple)
+
+
def validate(inst):
    """
    Run every attribute validator defined on *inst*.

    Exceptions raised by validators propagate unchanged.

    :param inst: Instance of a class with ``attrs`` attributes.
    """
    # Validators can be switched off globally via the _config module flag.
    if _config._run_validators is False:
        return

    for attribute in fields(inst.__class__):
        validator = attribute.validator
        if validator is None:
            continue
        validator(inst, attribute, getattr(inst, attribute.name))
+
+
+def _is_slot_cls(cls):
+ return "__slots__" in cls.__dict__
+
+
+def _is_slot_attr(a_name, base_attr_map):
+ """
+ Check if the attribute name comes from a slot class.
+ """
+ return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name])
+
+
def _make_init(
    cls,
    attrs,
    pre_init,
    post_init,
    frozen,
    slots,
    cache_hash,
    base_attr_map,
    is_exc,
    has_global_on_setattr,
    attrs_init,
):
    """
    Build the ``__init__`` method for *cls* from *attrs* — or
    ``__attrs_init__`` if *attrs_init* is true.
    """
    if frozen and has_global_on_setattr:
        raise ValueError("Frozen classes can't use on_setattr.")

    # frozen and cache_hash both need a cached object.__setattr__;
    # per-attribute on_setattr hooks may add to that below.
    needs_cached_setattr = cache_hash or frozen
    filtered_attrs = []
    attr_dict = {}
    for a in attrs:
        # Attributes that are neither passed to __init__ nor defaulted
        # are skipped entirely.
        if not a.init and a.default is NOTHING:
            continue

        filtered_attrs.append(a)
        attr_dict[a.name] = a

        if a.on_setattr is not None:
            if frozen is True:
                raise ValueError("Frozen classes can't use on_setattr.")

            needs_cached_setattr = True
        elif (
            has_global_on_setattr and a.on_setattr is not setters.NO_OP
        ) or _is_slot_attr(a.name, base_attr_map):
            # A class-wide hook (unless explicitly opted out with NO_OP),
            # or an attribute inherited from a slotted base, also forces
            # the cached-setattr path.
            needs_cached_setattr = True

    unique_filename = _generate_unique_filename(cls, "init")

    script, globs, annotations = _attrs_to_init_script(
        filtered_attrs,
        frozen,
        slots,
        pre_init,
        post_init,
        cache_hash,
        base_attr_map,
        is_exc,
        needs_cached_setattr,
        has_global_on_setattr,
        attrs_init,
    )
    if cls.__module__ in sys.modules:
        # This makes typing.get_type_hints(CLS.__init__) resolve string types.
        globs.update(sys.modules[cls.__module__].__dict__)

    globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})

    if needs_cached_setattr:
        # Save the lookup overhead in __init__ if we need to circumvent
        # setattr hooks.
        globs["_cached_setattr"] = _obj_setattr

    init = _make_method(
        "__attrs_init__" if attrs_init else "__init__",
        script,
        unique_filename,
        globs,
    )
    init.__annotations__ = annotations

    return init
+
+
+def _setattr(attr_name, value_var, has_on_setattr):
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*.
+ """
+ return "_setattr('%s', %s)" % (attr_name, value_var)
+
+
def _setattr_with_converter(attr_name, value_var, has_on_setattr):
    """
    Emit source that runs *value_var* through the attribute's converter
    and assigns the result via the cached object.__setattr__.
    """
    converter_name = _init_converter_pat % (attr_name,)
    return "_setattr('%s', %s(%s))" % (
        attr_name,
        converter_name,
        value_var,
    )
+
+
+def _assign(attr_name, value, has_on_setattr):
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise
+ relegate to _setattr.
+ """
+ if has_on_setattr:
+ return _setattr(attr_name, value, True)
+
+ return "self.%s = %s" % (attr_name, value)
+
+
def _assign_with_converter(attr_name, value_var, has_on_setattr):
    """
    Emit a direct converted assignment, deferring to
    _setattr_with_converter when an on_setattr hook is present.
    """
    if has_on_setattr:
        return _setattr_with_converter(attr_name, value_var, True)

    converter_name = _init_converter_pat % (attr_name,)
    return "self.%s = %s(%s)" % (
        attr_name,
        converter_name,
        value_var,
    )
+
+
if PY2:
    # Python 2 has no keyword-only argument syntax; the generated __init__
    # instead takes **_kw_only and unpacks it with the helpers below.

    def _unpack_kw_only_py2(attr_name, default=None):
        """
        Unpack *attr_name* from _kw_only dict.
        """
        if default is not None:
            arg_default = ", %s" % default
        else:
            arg_default = ""
        return "%s = _kw_only.pop('%s'%s)" % (
            attr_name,
            attr_name,
            arg_default,
        )

    def _unpack_kw_only_lines_py2(kw_only_args):
        """
        Unpack all *kw_only_args* from _kw_only dict and handle errors.

        Given a list of strings "{attr_name}" and "{attr_name}={default}"
        generates list of lines of code that pop attrs from _kw_only dict and
        raise TypeError similar to builtin if required attr is missing or
        extra key is passed.

        >>> print("\n".join(_unpack_kw_only_lines_py2(["a", "b=42"])))
        try:
            a = _kw_only.pop('a')
            b = _kw_only.pop('b', 42)
        except KeyError as _key_error:
            raise TypeError(
                ...
        if _kw_only:
            raise TypeError(
                ...
        """
        lines = ["try:"]
        lines.extend(
            "    " + _unpack_kw_only_py2(*arg.split("="))
            for arg in kw_only_args
        )
        # Append the shared error-handling epilogue verbatim; it mimics the
        # builtin TypeError messages for missing/unexpected kwargs.
        lines += """\
except KeyError as _key_error:
    raise TypeError(
        '__init__() missing required keyword-only argument: %s' % _key_error
    )
if _kw_only:
    raise TypeError(
        '__init__() got an unexpected keyword argument %r'
        % next(iter(_kw_only))
    )
""".split(
            "\n"
        )
        return lines
+
+
+def _attrs_to_init_script(
+ attrs,
+ frozen,
+ slots,
+ pre_init,
+ post_init,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ needs_cached_setattr,
+ has_global_on_setattr,
+ attrs_init,
+):
+ """
+ Return a script of an initializer for *attrs* and a dict of globals.
+
+ The globals are expected by the generated script.
+
+ If *frozen* is True, we cannot set the attributes directly so we use
+ a cached ``object.__setattr__``.
+ """
+ lines = []
+ if pre_init:
+ lines.append("self.__attrs_pre_init__()")
+
+ if needs_cached_setattr:
+ lines.append(
+ # Circumvent the __setattr__ descriptor to save one lookup per
+ # assignment.
+ # Note _setattr will be used again below if cache_hash is True
+ "_setattr = _cached_setattr.__get__(self, self.__class__)"
+ )
+
+ if frozen is True:
+ if slots is True:
+ fmt_setter = _setattr
+ fmt_setter_with_converter = _setattr_with_converter
+ else:
+ # Dict frozen classes assign directly to __dict__.
+ # But only if the attribute doesn't come from an ancestor slot
+ # class.
+ # Note _inst_dict will be used again below if cache_hash is True
+ lines.append("_inst_dict = self.__dict__")
+
+ def fmt_setter(attr_name, value_var, has_on_setattr):
+ if _is_slot_attr(attr_name, base_attr_map):
+ return _setattr(attr_name, value_var, has_on_setattr)
+
+ return "_inst_dict['%s'] = %s" % (attr_name, value_var)
+
+ def fmt_setter_with_converter(
+ attr_name, value_var, has_on_setattr
+ ):
+ if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
+ return _setattr_with_converter(
+ attr_name, value_var, has_on_setattr
+ )
+
+ return "_inst_dict['%s'] = %s(%s)" % (
+ attr_name,
+ _init_converter_pat % (attr_name,),
+ value_var,
+ )
+
+ else:
+ # Not frozen.
+ fmt_setter = _assign
+ fmt_setter_with_converter = _assign_with_converter
+
+ args = []
+ kw_only_args = []
+ attrs_to_validate = []
+
+ # This is a dictionary of names to validator and converter callables.
+ # Injecting this into __init__ globals lets us avoid lookups.
+ names_for_globals = {}
+ annotations = {"return": None}
+
+ for a in attrs:
+ if a.validator:
+ attrs_to_validate.append(a)
+
+ attr_name = a.name
+ has_on_setattr = a.on_setattr is not None or (
+ a.on_setattr is not setters.NO_OP and has_global_on_setattr
+ )
+ arg_name = a.name.lstrip("_")
+
+ has_factory = isinstance(a.default, Factory)
+ if has_factory and a.default.takes_self:
+ maybe_self = "self"
+ else:
+ maybe_self = ""
+
+ if a.init is False:
+ if has_factory:
+ init_factory_name = _init_factory_pat.format(a.name)
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
+ init_factory_name + "(%s)" % (maybe_self,),
+ has_on_setattr,
+ )
+ )
+ conv_name = _init_converter_pat % (a.name,)
+ names_for_globals[conv_name] = a.converter
+ else:
+ lines.append(
+ fmt_setter(
+ attr_name,
+ init_factory_name + "(%s)" % (maybe_self,),
+ has_on_setattr,
+ )
+ )
+ names_for_globals[init_factory_name] = a.default.factory
+ else:
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
+ "attr_dict['%s'].default" % (attr_name,),
+ has_on_setattr,
+ )
+ )
+ conv_name = _init_converter_pat % (a.name,)
+ names_for_globals[conv_name] = a.converter
+ else:
+ lines.append(
+ fmt_setter(
+ attr_name,
+ "attr_dict['%s'].default" % (attr_name,),
+ has_on_setattr,
+ )
+ )
+ elif a.default is not NOTHING and not has_factory:
+ arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name)
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
+
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
+ )
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
+ else:
+ lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+ elif has_factory:
+ arg = "%s=NOTHING" % (arg_name,)
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
+ lines.append("if %s is not NOTHING:" % (arg_name,))
+
+ init_factory_name = _init_factory_pat.format(a.name)
+ if a.converter is not None:
+ lines.append(
+ " "
+ + fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
+ )
+ lines.append("else:")
+ lines.append(
+ " "
+ + fmt_setter_with_converter(
+ attr_name,
+ init_factory_name + "(" + maybe_self + ")",
+ has_on_setattr,
+ )
+ )
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
+ else:
+ lines.append(
+ " " + fmt_setter(attr_name, arg_name, has_on_setattr)
+ )
+ lines.append("else:")
+ lines.append(
+ " "
+ + fmt_setter(
+ attr_name,
+ init_factory_name + "(" + maybe_self + ")",
+ has_on_setattr,
+ )
+ )
+ names_for_globals[init_factory_name] = a.default.factory
+ else:
+ if a.kw_only:
+ kw_only_args.append(arg_name)
+ else:
+ args.append(arg_name)
+
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
+ )
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
+ else:
+ lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+ if a.init is True:
+ if a.type is not None and a.converter is None:
+ annotations[arg_name] = a.type
+ elif a.converter is not None and not PY2:
+ # Try to get the type from the converter.
+ sig = None
+ try:
+ sig = inspect.signature(a.converter)
+ except (ValueError, TypeError): # inspect failed
+ pass
+ if sig:
+ sig_params = list(sig.parameters.values())
+ if (
+ sig_params
+ and sig_params[0].annotation
+ is not inspect.Parameter.empty
+ ):
+ annotations[arg_name] = sig_params[0].annotation
+
+ if attrs_to_validate: # we can skip this if there are no validators.
+ names_for_globals["_config"] = _config
+ lines.append("if _config._run_validators is True:")
+ for a in attrs_to_validate:
+ val_name = "__attr_validator_" + a.name
+ attr_name = "__attr_" + a.name
+ lines.append(
+ " %s(self, %s, self.%s)" % (val_name, attr_name, a.name)
+ )
+ names_for_globals[val_name] = a.validator
+ names_for_globals[attr_name] = a
+
+ if post_init:
+ lines.append("self.__attrs_post_init__()")
+
+ # because this is set only after __attrs_post_init is called, a crash
+ # will result if post-init tries to access the hash code. This seemed
+ # preferable to setting this beforehand, in which case alteration to
+ # field values during post-init combined with post-init accessing the
+ # hash code would result in silent bugs.
+ if cache_hash:
+ if frozen:
+ if slots:
+ # if frozen and slots, then _setattr defined above
+ init_hash_cache = "_setattr('%s', %s)"
+ else:
+ # if frozen and not slots, then _inst_dict defined above
+ init_hash_cache = "_inst_dict['%s'] = %s"
+ else:
+ init_hash_cache = "self.%s = %s"
+ lines.append(init_hash_cache % (_hash_cache_field, "None"))
+
+ # For exceptions we rely on BaseException.__init__ for proper
+ # initialization.
+ if is_exc:
+ vals = ",".join("self." + a.name for a in attrs if a.init)
+
+ lines.append("BaseException.__init__(self, %s)" % (vals,))
+
+ args = ", ".join(args)
+ if kw_only_args:
+ if PY2:
+ lines = _unpack_kw_only_lines_py2(kw_only_args) + lines
+
+ args += "%s**_kw_only" % (", " if args else "",) # leading comma
+ else:
+ args += "%s*, %s" % (
+ ", " if args else "", # leading comma
+ ", ".join(kw_only_args), # kw_only args
+ )
+ return (
+ """\
+def {init_name}(self, {args}):
+ {lines}
+""".format(
+ init_name=("__attrs_init__" if attrs_init else "__init__"),
+ args=args,
+ lines="\n ".join(lines) if lines else "pass",
+ ),
+ names_for_globals,
+ annotations,
+ )
+
+
class Attribute(object):
    """
    *Read-only* representation of an attribute.

    Instances of this class are frequently used for introspection purposes
    like:

    - `fields` returns a tuple of them.
    - Validators get them passed as the first argument.
    - The *field transformer* hook receives a list of them.

    :attribute name: The name of the attribute.
    :attribute inherited: Whether or not that attribute has been inherited from
        a base class.

    Plus *all* arguments of `attr.ib` (except for ``factory``
    which is only syntactic sugar for ``default=Factory(...)``).

    .. versionadded:: 20.1.0 *inherited*
    .. versionadded:: 20.1.0 *on_setattr*
    .. versionchanged:: 20.2.0 *inherited* is not taken into account for
        equality checks and hashing anymore.
    .. versionadded:: 21.1.0 *eq_key* and *order_key*

    For the full version history of the fields, see `attr.ib`.
    """

    __slots__ = (
        "name",
        "default",
        "validator",
        "repr",
        "eq",
        "eq_key",
        "order",
        "order_key",
        "hash",
        "init",
        "metadata",
        "type",
        "converter",
        "kw_only",
        "inherited",
        "on_setattr",
    )

    def __init__(
        self,
        name,
        default,
        validator,
        repr,
        cmp,  # XXX: unused, remove along with other cmp code.
        hash,
        init,
        inherited,
        metadata=None,
        type=None,
        converter=None,
        kw_only=False,
        eq=None,
        eq_key=None,
        order=None,
        order_key=None,
        on_setattr=None,
    ):
        # Normalize the deprecated *cmp* argument together with eq/order;
        # the *_key variants take precedence when supplied (``eq_key or eq``).
        eq, eq_key, order, order_key = _determine_attrib_eq_order(
            cmp, eq_key or eq, order_key or order, True
        )

        # Cache this descriptor here to speed things up later.
        bound_setattr = _obj_setattr.__get__(self, Attribute)

        # Despite the big red warning, people *do* instantiate `Attribute`
        # themselves.  All assignments go through object.__setattr__ because
        # this class's own __setattr__ is disabled below.
        bound_setattr("name", name)
        bound_setattr("default", default)
        bound_setattr("validator", validator)
        bound_setattr("repr", repr)
        bound_setattr("eq", eq)
        bound_setattr("eq_key", eq_key)
        bound_setattr("order", order)
        bound_setattr("order_key", order_key)
        bound_setattr("hash", hash)
        bound_setattr("init", init)
        bound_setattr("converter", converter)
        bound_setattr(
            "metadata",
            (
                # Empty metadata shares one read-only singleton.
                metadata_proxy(metadata)
                if metadata
                else _empty_metadata_singleton
            ),
        )
        bound_setattr("type", type)
        bound_setattr("kw_only", kw_only)
        bound_setattr("inherited", inherited)
        bound_setattr("on_setattr", on_setattr)

    def __setattr__(self, name, value):
        # Attribute instances are immutable; any direct assignment fails.
        raise FrozenInstanceError()

    @classmethod
    def from_counting_attr(cls, name, ca, type=None):
        # type holds the annotated value. deal with conflicts:
        if type is None:
            type = ca.type
        elif ca.type is not None:
            raise ValueError(
                "Type annotation and type argument cannot both be present"
            )
        # Copy everything else verbatim from the _CountingAttr, excluding
        # the fields that are passed explicitly below.
        inst_dict = {
            k: getattr(ca, k)
            for k in Attribute.__slots__
            if k
            not in (
                "name",
                "validator",
                "default",
                "type",
                "inherited",
            )  # exclude methods and deprecated alias
        }
        return cls(
            name=name,
            validator=ca._validator,
            default=ca._default,
            type=type,
            cmp=None,
            inherited=False,
            **inst_dict
        )

    @property
    def cmp(self):
        """
        Simulate the presence of a cmp attribute and warn.
        """
        warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2)

        return self.eq and self.order

    # Don't use attr.evolve since fields(Attribute) doesn't work
    def evolve(self, **changes):
        """
        Copy *self* and apply *changes*.

        This works similarly to `attr.evolve` but that function does not work
        with ``Attribute``.

        It is mainly meant to be used for `transform-fields`.

        .. versionadded:: 20.3.0
        """
        new = copy.copy(self)

        new._setattrs(changes.items())

        return new

    # Don't use _add_pickle since fields(Attribute) doesn't work
    def __getstate__(self):
        """
        Play nice with pickle.
        """
        # metadata is stored behind metadata_proxy; pickle a plain dict
        # instead so the state round-trips through _setattrs below.
        return tuple(
            getattr(self, name) if name != "metadata" else dict(self.metadata)
            for name in self.__slots__
        )

    def __setstate__(self, state):
        """
        Play nice with pickle.
        """
        self._setattrs(zip(self.__slots__, state))

    def _setattrs(self, name_values_pairs):
        # Bypass the frozen __setattr__ with object.__setattr__.
        bound_setattr = _obj_setattr.__get__(self, Attribute)
        for name, value in name_values_pairs:
            if name != "metadata":
                bound_setattr(name, value)
            else:
                # Re-wrap metadata; empty metadata shares the singleton.
                bound_setattr(
                    name,
                    metadata_proxy(value)
                    if value
                    else _empty_metadata_singleton,
                )
+
+
# Bootstrap: hand-build Attribute instances describing Attribute's own
# fields, so the dunder-adding helpers can be applied to Attribute itself.
_a = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=(name != "metadata"),  # metadata dicts are unhashable
        init=True,
        inherited=False,
    )
    for name in Attribute.__slots__
]

# Give Attribute __repr__/__eq__/__hash__.  ``inherited`` is excluded from
# eq and hash (see the class docstring's 20.2.0 versionchanged note).
Attribute = _add_hash(
    _add_eq(
        _add_repr(Attribute, attrs=_a),
        attrs=[a for a in _a if a.name != "inherited"],
    ),
    attrs=[a for a in _a if a.hash and a.name != "inherited"],
)
+
+
class _CountingAttr(object):
    """
    Intermediate representation of attributes that uses a counter to preserve
    the order in which the attributes have been defined.

    *Internal* data structure of the attrs library.  Running into one is most
    likely the result of a bug like a forgotten `@attr.s` decorator.
    """

    __slots__ = (
        "counter",
        "_default",
        "repr",
        "eq",
        "eq_key",
        "order",
        "order_key",
        "hash",
        "init",
        "metadata",
        "_validator",
        "converter",
        "type",
        "kw_only",
        "on_setattr",
    )
    # Hand-rolled attribute descriptions so _add_repr/_add_eq (applied at the
    # bottom of this block) know which fields to include.
    __attrs_attrs__ = tuple(
        Attribute(
            name=name,
            default=NOTHING,
            validator=None,
            repr=True,
            cmp=None,
            hash=True,
            init=True,
            kw_only=False,
            eq=True,
            eq_key=None,
            order=False,
            order_key=None,
            inherited=False,
            on_setattr=None,
        )
        for name in (
            "counter",
            "_default",
            "repr",
            "eq",
            "order",
            "hash",
            "init",
            "on_setattr",
        )
    ) + (
        # metadata is listed separately because it must stay out of hashing.
        Attribute(
            name="metadata",
            default=None,
            validator=None,
            repr=True,
            cmp=None,
            hash=False,
            init=True,
            kw_only=False,
            eq=True,
            eq_key=None,
            order=False,
            order_key=None,
            inherited=False,
            on_setattr=None,
        ),
    )
    # Class-level counter, bumped once per instantiation to record the order
    # in which attributes were defined.
    cls_counter = 0

    def __init__(
        self,
        default,
        validator,
        repr,
        cmp,
        hash,
        init,
        converter,
        metadata,
        type,
        kw_only,
        eq,
        eq_key,
        order,
        order_key,
        on_setattr,
    ):
        _CountingAttr.cls_counter += 1
        self.counter = _CountingAttr.cls_counter
        self._default = default
        self._validator = validator
        self.converter = converter
        self.repr = repr
        self.eq = eq
        self.eq_key = eq_key
        self.order = order
        self.order_key = order_key
        self.hash = hash
        self.init = init
        self.metadata = metadata
        self.type = type
        self.kw_only = kw_only
        self.on_setattr = on_setattr

    def validator(self, meth):
        """
        Decorator that adds *meth* to the list of validators.

        Returns *meth* unchanged.

        .. versionadded:: 17.1.0
        """
        # Multiple decorated validators are composed with and_().
        if self._validator is None:
            self._validator = meth
        else:
            self._validator = and_(self._validator, meth)
        return meth

    def default(self, meth):
        """
        Decorator that allows to set the default for an attribute.

        Returns *meth* unchanged.

        :raises DefaultAlreadySetError: If default has been set before.

        .. versionadded:: 17.1.0
        """
        if self._default is not NOTHING:
            raise DefaultAlreadySetError()

        # The decorated method becomes a takes_self factory.
        self._default = Factory(meth, takes_self=True)

        return meth


_CountingAttr = _add_eq(_add_repr(_CountingAttr))
+
+
class Factory(object):
    """
    Wrapper that marks a callable as a default-value factory.

    When passed as the default to `attr.ib`, ``factory`` is invoked to
    produce a fresh value instead of sharing a single default object.

    :param callable factory: A callable that takes either none or exactly one
        mandatory positional argument depending on *takes_self*.
    :param bool takes_self: Pass the partially initialized instance that is
        being initialized as a positional argument.

    .. versionadded:: 17.1.0 *takes_self*
    """

    __slots__ = ("factory", "takes_self")

    def __init__(self, factory, takes_self=False):
        """
        `Factory` underpins the default machinery itself, so its own default
        for *takes_self* has to be spelled out by hand.
        """
        self.takes_self = takes_self
        self.factory = factory

    def __getstate__(self):
        """
        Play nice with pickle.
        """
        # Slotted classes have no __dict__; hand pickle an explicit state
        # tuple in __slots__ order.
        return (self.factory, self.takes_self)

    def __setstate__(self, state):
        """
        Play nice with pickle.
        """
        # Restore from the tuple produced by __getstate__.
        self.factory, self.takes_self = state
+
+
# Bootstrap Attribute descriptions for Factory's two slots so that repr,
# eq, and hash support can be grafted onto Factory below.
_f = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=True,
        init=True,
        inherited=False,
    )
    for name in Factory.__slots__
]

Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
+
+
def make_class(name, attrs, bases=(object,), **attributes_arguments):
    """
    A quick way to create a new class called *name* with *attrs*.

    :param str name: The name for the new class.

    :param attrs: A list of names or a dictionary of mappings of names to
        attributes.

        If *attrs* is a list or an ordered dict (`dict` on Python 3.6+,
        `collections.OrderedDict` otherwise), the order is deduced from
        the order of the names or attributes inside *attrs*.  Otherwise the
        order of the definition of the attributes is used.
    :type attrs: `list` or `dict`

    :param tuple bases: Classes that the new class will subclass.

    :param attributes_arguments: Passed unmodified to `attr.s`.

    :return: A new class with *attrs*.
    :rtype: type

    .. versionadded:: 17.1.0 *bases*
    .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
    """
    if isinstance(attrs, dict):
        cls_dict = attrs
    elif isinstance(attrs, (list, tuple)):
        # A bare name list means "fresh attr.ib() for each name".
        cls_dict = dict((a, attrib()) for a in attrs)
    else:
        raise TypeError("attrs argument must be a dict or a list.")

    # Lifecycle hooks must not be treated as attributes; pop them out and
    # carry them over onto the new class body instead.
    pre_init = cls_dict.pop("__attrs_pre_init__", None)
    post_init = cls_dict.pop("__attrs_post_init__", None)
    user_init = cls_dict.pop("__init__", None)

    body = {}
    if pre_init is not None:
        body["__attrs_pre_init__"] = pre_init
    if post_init is not None:
        body["__attrs_post_init__"] = post_init
    if user_init is not None:
        body["__init__"] = user_init

    type_ = new_class(name, bases, {}, lambda ns: ns.update(body))

    # For pickling to work, the __module__ variable needs to be set to the
    # frame where the class is created. Bypass this step in environments where
    # sys._getframe is not defined (Jython for example) or sys._getframe is not
    # defined for arguments greater than 0 (IronPython).
    try:
        type_.__module__ = sys._getframe(1).f_globals.get(
            "__name__", "__main__"
        )
    except (AttributeError, ValueError):
        pass

    # We do it here for proper warnings with meaningful stacklevel.
    cmp = attributes_arguments.pop("cmp", None)
    (
        attributes_arguments["eq"],
        attributes_arguments["order"],
    ) = _determine_attrs_eq_order(
        cmp,
        attributes_arguments.get("eq"),
        attributes_arguments.get("order"),
        True,
    )

    # Finally run the class through the @attr.s machinery.
    return _attrs(these=cls_dict, **attributes_arguments)(type_)
+
+
+# These are required by within this module so we define them here and merely
+# import into .validators / .converters.
+
+
@attrs(slots=True, hash=True)
class _AndValidator(object):
    """
    Compose many validators to a single one.
    """

    # Tuple of wrapped validator callables.
    _validators = attrib()

    def __call__(self, inst, attr, value):
        # Run every wrapped validator in order; any of them may raise.
        for v in self._validators:
            v(inst, attr, value)
+
+
def and_(*validators):
    """
    A validator that composes multiple validators into one.

    When called on a value, it runs all wrapped validators.

    :param callables validators: Arbitrary number of validators.

    .. versionadded:: 17.1.0
    """
    # Flatten nested _AndValidator instances so the composition always
    # stays one level deep.
    flattened = []
    for v in validators:
        if isinstance(v, _AndValidator):
            flattened.extend(v._validators)
        else:
            flattened.append(v)

    return _AndValidator(tuple(flattened))
+
+
def pipe(*converters):
    """
    A converter that composes multiple converters into one.

    When called on a value, it runs all wrapped converters, returning the
    *last* value.

    Type annotations will be inferred from the wrapped converters', if
    they have any.

    :param callables converters: Arbitrary number of converters.

    .. versionadded:: 20.1.0
    """

    def pipe_converter(val):
        # Thread the value through each converter, left to right.
        for converter in converters:
            val = converter(val)

        return val

    # On Python 3, mirror the first converter's parameter annotation and the
    # last converter's return annotation onto pipe_converter.
    if not PY2:
        if not converters:
            # If the converter list is empty, pipe_converter is the identity.
            A = typing.TypeVar("A")
            pipe_converter.__annotations__ = {"val": A, "return": A}
        else:
            # Get parameter type.
            sig = None
            try:
                sig = inspect.signature(converters[0])
            except (ValueError, TypeError):  # inspect failed
                pass
            if sig:
                sig_params = list(sig.parameters.values())
                if (
                    sig_params
                    and sig_params[0].annotation
                    is not inspect.Parameter.empty
                ):
                    pipe_converter.__annotations__["val"] = sig_params[
                        0
                    ].annotation
            # Get return type.
            sig = None
            try:
                sig = inspect.signature(converters[-1])
            except (ValueError, TypeError):  # inspect failed
                pass
            if sig and sig.return_annotation is not inspect.Signature().empty:
                pipe_converter.__annotations__[
                    "return"
                ] = sig.return_annotation

    return pipe_converter
diff --git a/lib/spack/external/attr/_next_gen.py b/lib/spack/external/attr/_next_gen.py
new file mode 100644
index 0000000000..fab0af966a
--- /dev/null
+++ b/lib/spack/external/attr/_next_gen.py
@@ -0,0 +1,158 @@
+"""
+These are Python 3.6+-only and keyword-only APIs that call `attr.s` and
+`attr.ib` with different default values.
+"""
+
+from functools import partial
+
+from attr.exceptions import UnannotatedAttributeError
+
+from . import setters
+from ._make import NOTHING, _frozen_setattrs, attrib, attrs
+
+
def define(
    maybe_cls=None,
    *,
    these=None,
    repr=None,
    hash=None,
    init=None,
    slots=True,
    frozen=False,
    weakref_slot=True,
    str=False,
    auto_attribs=None,
    kw_only=False,
    cache_hash=False,
    auto_exc=True,
    eq=None,
    order=False,
    auto_detect=True,
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
):
    r"""
    The only behavioral differences are the handling of the *auto_attribs*
    option:

    :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
        exactly like `attr.s`. If left `None`, `attr.s` will try to guess:

        1. If any attributes are annotated and no unannotated `attr.ib`\ s
           are found, it assumes *auto_attribs=True*.
        2. Otherwise it assumes *auto_attribs=False* and tries to collect
           `attr.ib`\ s.

    and that mutable classes (``frozen=False``) validate on ``__setattr__``.

    .. versionadded:: 20.1.0
    """

    def do_it(cls, auto_attribs):
        # Forward everything to attr.s with next-gen defaults
        # (notably collect_by_mro=True).
        return attrs(
            maybe_cls=cls,
            these=these,
            repr=repr,
            hash=hash,
            init=init,
            slots=slots,
            frozen=frozen,
            weakref_slot=weakref_slot,
            str=str,
            auto_attribs=auto_attribs,
            kw_only=kw_only,
            cache_hash=cache_hash,
            auto_exc=auto_exc,
            eq=eq,
            order=order,
            auto_detect=auto_detect,
            collect_by_mro=True,
            getstate_setstate=getstate_setstate,
            on_setattr=on_setattr,
            field_transformer=field_transformer,
        )

    def wrap(cls):
        """
        Making this a wrapper ensures this code runs during class creation.

        We also ensure that frozen-ness of classes is inherited.
        """
        # NOTE(review): *frozen* is declared nonlocal but never rebound in
        # this closure; only *on_setattr* is.
        nonlocal frozen, on_setattr

        # Remember whether the caller passed a real hook before we start
        # overwriting on_setattr below.
        had_on_setattr = on_setattr not in (None, setters.NO_OP)

        # By default, mutable classes validate on setattr.
        if frozen is False and on_setattr is None:
            on_setattr = setters.validate

        # However, if we subclass a frozen class, we inherit the immutability
        # and disable on_setattr.
        for base_cls in cls.__bases__:
            if base_cls.__setattr__ is _frozen_setattrs:
                if had_on_setattr:
                    raise ValueError(
                        "Frozen classes can't use on_setattr "
                        "(frozen-ness was inherited)."
                    )

                on_setattr = setters.NO_OP
                break

        if auto_attribs is not None:
            return do_it(cls, auto_attribs)

        # Guess: try annotated mode first, fall back to collecting attr.ib()s
        # if unannotated ones are found.
        try:
            return do_it(cls, True)
        except UnannotatedAttributeError:
            return do_it(cls, False)

    # maybe_cls's type depends on the usage of the decorator. It's a class
    # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
    if maybe_cls is None:
        return wrap
    else:
        return wrap(maybe_cls)
+
+
# `mutable` is a plain alias for `define`; `frozen` curries it with
# immutability on and the default on_setattr hook disabled.
mutable = define
frozen = partial(define, frozen=True, on_setattr=None)
+
+
def field(
    *,
    default=NOTHING,
    validator=None,
    repr=True,
    hash=None,
    init=True,
    metadata=None,
    converter=None,
    factory=None,
    kw_only=False,
    eq=None,
    order=None,
    on_setattr=None,
):
    """
    Identical to `attr.ib`, except keyword-only and with some arguments
    removed.

    .. versionadded:: 20.1.0
    """
    # Pure pass-through: collect the keyword arguments and hand them to
    # attr.ib unchanged.
    kwargs = dict(
        default=default,
        validator=validator,
        repr=repr,
        hash=hash,
        init=init,
        metadata=metadata,
        converter=converter,
        factory=factory,
        kw_only=kw_only,
        eq=eq,
        order=order,
        on_setattr=on_setattr,
    )
    return attrib(**kwargs)
diff --git a/lib/spack/external/attr/_version_info.py b/lib/spack/external/attr/_version_info.py
new file mode 100644
index 0000000000..014e78a1b4
--- /dev/null
+++ b/lib/spack/external/attr/_version_info.py
@@ -0,0 +1,85 @@
+from __future__ import absolute_import, division, print_function
+
+from functools import total_ordering
+
+from ._funcs import astuple
+from ._make import attrib, attrs
+
+
@total_ordering
@attrs(eq=False, order=False, slots=True, frozen=True)
class VersionInfo(object):
    """
    A version object that can be compared to tuple of length 1--4:

    >>> attr.VersionInfo(19, 1, 0, "final")  <= (19, 2)
    True
    >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
    True
    >>> vi = attr.VersionInfo(19, 2, 0, "final")
    >>> vi < (19, 1, 1)
    False
    >>> vi < (19,)
    False
    >>> vi == (19, 2,)
    True
    >>> vi == (19, 2, 1)
    False

    .. versionadded:: 19.2
    """

    year = attrib(type=int)
    minor = attrib(type=int)
    micro = attrib(type=int)
    releaselevel = attrib(type=str)

    @classmethod
    def _from_version_string(cls, s):
        """
        Parse *s* and return a _VersionInfo.
        """
        v = s.split(".")
        # A three-part version string implies a "final" release level.
        if len(v) == 3:
            v.append("final")

        return cls(
            year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
        )

    def _ensure_tuple(self, other):
        """
        Ensure *other* is a tuple of a valid length.

        Returns a possibly transformed *other* and ourselves as a tuple of
        the same length as *other*.

        NotImplementedError is used as an internal signal here; the
        comparison methods below translate it into ``NotImplemented``.
        """

        if self.__class__ is other.__class__:
            other = astuple(other)

        if not isinstance(other, tuple):
            raise NotImplementedError

        if not (1 <= len(other) <= 4):
            raise NotImplementedError

        # Truncate ourselves so both sides compare over the same fields.
        return astuple(self)[: len(other)], other

    def __eq__(self, other):
        try:
            us, them = self._ensure_tuple(other)
        except NotImplementedError:
            return NotImplemented

        return us == them

    def __lt__(self, other):
        try:
            us, them = self._ensure_tuple(other)
        except NotImplementedError:
            return NotImplemented

        # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
        # have to do anything special with releaselevel for now.
        return us < them
diff --git a/lib/spack/external/attr/converters.py b/lib/spack/external/attr/converters.py
new file mode 100644
index 0000000000..2777db6d0a
--- /dev/null
+++ b/lib/spack/external/attr/converters.py
@@ -0,0 +1,111 @@
+"""
+Commonly useful converters.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from ._compat import PY2
+from ._make import NOTHING, Factory, pipe
+
+
+if not PY2:
+ import inspect
+ import typing
+
+
+__all__ = [
+ "pipe",
+ "optional",
+ "default_if_none",
+]
+
+
def optional(converter):
    """
    A converter that allows an attribute to be optional. An optional attribute
    is one which can be set to ``None``.

    Type annotations will be inferred from the wrapped converter's, if it
    has any.

    :param callable converter: the converter that is used for non-``None``
        values.

    .. versionadded:: 17.1.0
    """

    def optional_converter(val):
        # None passes through untouched; everything else is converted.
        if val is None:
            return None
        return converter(val)

    # On Python 3, wrap the inner converter's annotations (if introspectable)
    # in typing.Optional and copy them onto the wrapper.
    if not PY2:
        sig = None
        try:
            sig = inspect.signature(converter)
        except (ValueError, TypeError):  # inspect failed
            pass
        if sig:
            params = list(sig.parameters.values())
            if params and params[0].annotation is not inspect.Parameter.empty:
                optional_converter.__annotations__["val"] = typing.Optional[
                    params[0].annotation
                ]
            if sig.return_annotation is not inspect.Signature.empty:
                optional_converter.__annotations__["return"] = typing.Optional[
                    sig.return_annotation
                ]

    return optional_converter
+
+
def default_if_none(default=NOTHING, factory=None):
    """
    A converter that allows to replace ``None`` values by *default* or the
    result of *factory*.

    :param default: Value to be used if ``None`` is passed. Passing an instance
        of `attr.Factory` is supported, however the ``takes_self`` option
        is *not*.
    :param callable factory: A callable that takes no parameters whose result
        is used if ``None`` is passed.

    :raises TypeError: If **neither** *default* or *factory* is passed.
    :raises TypeError: If **both** *default* and *factory* are passed.
    :raises ValueError: If an instance of `attr.Factory` is passed with
        ``takes_self=True``.

    .. versionadded:: 18.2.0
    """
    # Exactly one of *default* / *factory* must be supplied.
    if default is not NOTHING and factory is not None:
        raise TypeError(
            "Must pass either `default` or `factory` but not both."
        )

    if default is NOTHING and factory is None:
        raise TypeError("Must pass either `default` or `factory`.")

    if factory is not None:
        default = Factory(factory)

    use_factory = isinstance(default, Factory)
    if use_factory and default.takes_self:
        raise ValueError(
            "`takes_self` is not supported by default_if_none."
        )

    def default_if_none_converter(val):
        # Non-None values pass straight through; None is replaced by the
        # default (freshly produced when it is a Factory).
        if val is not None:
            return val

        return default.factory() if use_factory else default

    return default_if_none_converter
diff --git a/lib/spack/external/attr/exceptions.py b/lib/spack/external/attr/exceptions.py
new file mode 100644
index 0000000000..f6f9861bea
--- /dev/null
+++ b/lib/spack/external/attr/exceptions.py
@@ -0,0 +1,92 @@
+from __future__ import absolute_import, division, print_function
+
+
class FrozenError(AttributeError):
    """
    A frozen/immutable instance or attribute has been attempted to be
    modified.

    It mirrors the behavior of ``namedtuples`` by using the same error message
    and subclassing `AttributeError`.

    .. versionadded:: 20.1.0
    """

    # Pre-set message and args so str() of a bare raise matches namedtuple's.
    msg = "can't set attribute"
    args = [msg]
+
+
class FrozenInstanceError(FrozenError):
    """
    Raised when modification of a frozen instance is attempted.

    .. versionadded:: 16.1.0
    """
+
+
class FrozenAttributeError(FrozenError):
    """
    Raised when modification of a frozen attribute is attempted.

    .. versionadded:: 20.1.0
    """
+
+
class AttrsAttributeNotFoundError(ValueError):
    """
    An ``attrs`` function couldn't find an attribute that the user asked for.

    .. versionadded:: 16.2.0
    """
+
+
class NotAnAttrsClassError(ValueError):
    """
    A non-``attrs`` class has been passed into an ``attrs`` function that
    requires one.

    .. versionadded:: 16.2.0
    """
+
+
class DefaultAlreadySetError(RuntimeError):
    """
    A default has been set using ``attr.ib()`` and is attempted to be reset
    using the ``default`` decorator.

    .. versionadded:: 17.1.0
    """
+
+
class UnannotatedAttributeError(RuntimeError):
    """
    A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
    annotation.

    .. versionadded:: 17.3.0
    """
+
+
class PythonTooOldError(RuntimeError):
    """
    An ``attrs`` feature requiring a newer Python version has been attempted
    to be used.

    .. versionadded:: 18.2.0
    """
+
+
class NotCallableError(TypeError):
    """
    An ``attr.ib()`` requiring a callable has been set with a value
    that is not callable.

    .. versionadded:: 19.2.0
    """

    def __init__(self, msg, value):
        # NOTE(review): super(TypeError, self) deliberately skips
        # TypeError.__init__ in the MRO; the observable result (args set to
        # (msg, value)) appears equivalent to the conventional
        # super(NotCallableError, self) spelling.
        super(TypeError, self).__init__(msg, value)
        self.msg = msg
        self.value = value

    def __str__(self):
        # Only the message is rendered; *value* stays available as an
        # attribute and inside args.
        return str(self.msg)
diff --git a/lib/spack/external/attr/filters.py b/lib/spack/external/attr/filters.py
new file mode 100644
index 0000000000..dc47e8fa38
--- /dev/null
+++ b/lib/spack/external/attr/filters.py
@@ -0,0 +1,52 @@
+"""
+Commonly useful filters for `attr.asdict`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from ._compat import isclass
+from ._make import Attribute
+
+
def _split_what(what):
    """
    Returns a tuple of `frozenset`s of classes and attributes.
    """
    classes = frozenset(w for w in what if isclass(w))
    attributes = frozenset(w for w in what if isinstance(w, Attribute))
    return classes, attributes
+
+
def include(*what):
    """
    Whitelist *what*.

    :param what: What to whitelist.
    :type what: `list` of `type` or `attr.Attribute`\\ s

    :rtype: `callable`
    """
    classes, attributes = _split_what(what)

    def include_(attribute, value):
        # Keep a field if its value's class OR the attribute itself was
        # whitelisted.
        return value.__class__ in classes or attribute in attributes

    return include_
+
+
def exclude(*what):
    """
    Blacklist *what*.

    :param what: What to blacklist.
    :type what: `list` of classes or `attr.Attribute`\\ s.

    :rtype: `callable`
    """
    classes, attributes = _split_what(what)

    def exclude_(attribute, value):
        # Keep a field only if neither its value's class nor the attribute
        # itself was blacklisted.
        return value.__class__ not in classes and attribute not in attributes

    return exclude_
diff --git a/lib/spack/external/attr/setters.py b/lib/spack/external/attr/setters.py
new file mode 100644
index 0000000000..240014b3c1
--- /dev/null
+++ b/lib/spack/external/attr/setters.py
@@ -0,0 +1,77 @@
+"""
+Commonly used hooks for on_setattr.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from . import _config
+from .exceptions import FrozenAttributeError
+
+
def pipe(*setters):
    """
    Run all *setters* and return the return value of the last one.

    .. versionadded:: 20.1.0
    """

    def wrapped_pipe(instance, attrib, new_value):
        # Thread the value through each setter, in order; each setter sees
        # the previous setter's return value.
        result = new_value
        for setter in setters:
            result = setter(instance, attrib, result)

        return result

    return wrapped_pipe
+
+
def frozen(_, __, ___):
    """
    Prevent an attribute from being modified.

    .. versionadded:: 20.1.0
    """
    # Unconditionally reject the assignment; all three hook arguments
    # (instance, attribute, new value) are ignored.
    raise FrozenAttributeError()
+
+
def validate(instance, attrib, new_value):
    """
    Run *attrib*'s validator on *new_value* if it has one.

    Returns *new_value* unchanged.

    .. versionadded:: 20.1.0
    """
    # Honor the global kill switch (attr.set_run_validators(False)).
    if _config._run_validators is False:
        return new_value

    v = attrib.validator
    if not v:
        return new_value

    # Validators signal failure by raising; the value itself is untouched.
    v(instance, attrib, new_value)

    return new_value
+
+
def convert(instance, attrib, new_value):
    """
    Run *attrib*'s converter -- if it has one -- on *new_value* and return the
    result.

    .. versionadded:: 20.1.0
    """
    converter = attrib.converter

    # No (truthy) converter means the value passes through unchanged.
    return converter(new_value) if converter else new_value
+
+
# Bare object sentinel; consumers compare against it by identity.
NO_OP = object()
"""
Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.

Does not work in `pipe` or within lists.

.. versionadded:: 20.1.0
"""
diff --git a/lib/spack/external/attr/validators.py b/lib/spack/external/attr/validators.py
new file mode 100644
index 0000000000..b9a73054e9
--- /dev/null
+++ b/lib/spack/external/attr/validators.py
@@ -0,0 +1,379 @@
+"""
+Commonly useful validators.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import re
+
+from ._make import _AndValidator, and_, attrib, attrs
+from .exceptions import NotCallableError
+
+
+__all__ = [
+ "and_",
+ "deep_iterable",
+ "deep_mapping",
+ "in_",
+ "instance_of",
+ "is_callable",
+ "matches_re",
+ "optional",
+ "provides",
+]
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InstanceOfValidator(object):
+ type = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not isinstance(value, self.type):
+ raise TypeError(
+ "'{name}' must be {type!r} (got {value!r} that is a "
+ "{actual!r}).".format(
+ name=attr.name,
+ type=self.type,
+ actual=value.__class__,
+ value=value,
+ ),
+ attr,
+ self.type,
+ value,
+ )
+
+ def __repr__(self):
+ return "<instance_of validator for type {type!r}>".format(
+ type=self.type
+ )
+
+
+def instance_of(type):
+ """
+ A validator that raises a `TypeError` if the initializer is called
+ with a wrong type for this particular attribute (checks are performed using
+ `isinstance` therefore it's also valid to pass a tuple of types).
+
+ :param type: The type to check for.
+ :type type: type or tuple of types
+
+ :raises TypeError: With a human readable error message, the attribute
+ (of type `attr.Attribute`), the expected type, and the value it
+ got.
+ """
+ return _InstanceOfValidator(type)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MatchesReValidator(object):
+ regex = attrib()
+ flags = attrib()
+ match_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.match_func(value):
+ raise ValueError(
+ "'{name}' must match regex {regex!r}"
+ " ({value!r} doesn't)".format(
+ name=attr.name, regex=self.regex.pattern, value=value
+ ),
+ attr,
+ self.regex,
+ value,
+ )
+
+ def __repr__(self):
+ return "<matches_re validator for pattern {regex!r}>".format(
+ regex=self.regex
+ )
+
+
+def matches_re(regex, flags=0, func=None):
+ r"""
+ A validator that raises `ValueError` if the initializer is called
+ with a string that doesn't match *regex*.
+
+ :param str regex: a regex string to match against
+ :param int flags: flags that will be passed to the underlying re function
+ (default 0)
+ :param callable func: which underlying `re` function to call (options
+ are `re.fullmatch`, `re.search`, `re.match`, default
+ is ``None`` which means either `re.fullmatch` or an emulation of
+ it on Python 2). For performance reasons, they won't be used directly
+ but on a pre-`re.compile`\ ed pattern.
+
+ .. versionadded:: 19.2.0
+ """
+ fullmatch = getattr(re, "fullmatch", None)
+ valid_funcs = (fullmatch, None, re.search, re.match)
+ if func not in valid_funcs:
+ raise ValueError(
+ "'func' must be one of %s."
+ % (
+ ", ".join(
+ sorted(
+ e and e.__name__ or "None" for e in set(valid_funcs)
+ )
+ ),
+ )
+ )
+
+ pattern = re.compile(regex, flags)
+ if func is re.match:
+ match_func = pattern.match
+ elif func is re.search:
+ match_func = pattern.search
+ else:
+ if fullmatch:
+ match_func = pattern.fullmatch
+ else:
+ pattern = re.compile(r"(?:{})\Z".format(regex), flags)
+ match_func = pattern.match
+
+ return _MatchesReValidator(pattern, flags, match_func)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _ProvidesValidator(object):
+ interface = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.interface.providedBy(value):
+ raise TypeError(
+ "'{name}' must provide {interface!r} which {value!r} "
+ "doesn't.".format(
+ name=attr.name, interface=self.interface, value=value
+ ),
+ attr,
+ self.interface,
+ value,
+ )
+
+ def __repr__(self):
+ return "<provides validator for interface {interface!r}>".format(
+ interface=self.interface
+ )
+
+
+def provides(interface):
+ """
+ A validator that raises a `TypeError` if the initializer is called
+ with an object that does not provide the requested *interface* (checks are
+ performed using ``interface.providedBy(value)`` (see `zope.interface
+ <https://zopeinterface.readthedocs.io/en/latest/>`_).
+
+ :param interface: The interface to check for.
+ :type interface: ``zope.interface.Interface``
+
+ :raises TypeError: With a human readable error message, the attribute
+ (of type `attr.Attribute`), the expected interface, and the
+ value it got.
+ """
+ return _ProvidesValidator(interface)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _OptionalValidator(object):
+ validator = attrib()
+
+ def __call__(self, inst, attr, value):
+ if value is None:
+ return
+
+ self.validator(inst, attr, value)
+
+ def __repr__(self):
+ return "<optional validator for {what} or None>".format(
+ what=repr(self.validator)
+ )
+
+
+def optional(validator):
+ """
+ A validator that makes an attribute optional. An optional attribute is one
+ which can be set to ``None`` in addition to satisfying the requirements of
+ the sub-validator.
+
+ :param validator: A validator (or a list of validators) that is used for
+ non-``None`` values.
+ :type validator: callable or `list` of callables.
+
+ .. versionadded:: 15.1.0
+ .. versionchanged:: 17.1.0 *validator* can be a list of validators.
+ """
+ if isinstance(validator, list):
+ return _OptionalValidator(_AndValidator(validator))
+ return _OptionalValidator(validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InValidator(object):
+ options = attrib()
+
+ def __call__(self, inst, attr, value):
+ try:
+ in_options = value in self.options
+ except TypeError: # e.g. `1 in "abc"`
+ in_options = False
+
+ if not in_options:
+ raise ValueError(
+ "'{name}' must be in {options!r} (got {value!r})".format(
+ name=attr.name, options=self.options, value=value
+ )
+ )
+
+ def __repr__(self):
+ return "<in_ validator with options {options!r}>".format(
+ options=self.options
+ )
+
+
+def in_(options):
+ """
+ A validator that raises a `ValueError` if the initializer is called
+ with a value that does not belong in the options provided. The check is
+ performed using ``value in options``.
+
+ :param options: Allowed options.
+ :type options: list, tuple, `enum.Enum`, ...
+
+ :raises ValueError: With a human readable error message, the attribute (of
+ type `attr.Attribute`), the expected options, and the value it
+ got.
+
+ .. versionadded:: 17.1.0
+ """
+ return _InValidator(options)
+
+
+@attrs(repr=False, slots=False, hash=True)
+class _IsCallableValidator(object):
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not callable(value):
+ message = (
+ "'{name}' must be callable "
+ "(got {value!r} that is a {actual!r})."
+ )
+ raise NotCallableError(
+ msg=message.format(
+ name=attr.name, value=value, actual=value.__class__
+ ),
+ value=value,
+ )
+
+ def __repr__(self):
+ return "<is_callable validator>"
+
+
+def is_callable():
+ """
+ A validator that raises a `attr.exceptions.NotCallableError` if the
+ initializer is called with a value for this particular attribute
+ that is not callable.
+
+ .. versionadded:: 19.1.0
+
+ :raises `attr.exceptions.NotCallableError`: With a human readable error
+ message containing the attribute (`attr.Attribute`) name,
+ and the value it got.
+ """
+ return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepIterable(object):
+ member_validator = attrib(validator=is_callable())
+ iterable_validator = attrib(
+ default=None, validator=optional(is_callable())
+ )
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.iterable_validator is not None:
+ self.iterable_validator(inst, attr, value)
+
+ for member in value:
+ self.member_validator(inst, attr, member)
+
+ def __repr__(self):
+ iterable_identifier = (
+ ""
+ if self.iterable_validator is None
+ else " {iterable!r}".format(iterable=self.iterable_validator)
+ )
+ return (
+ "<deep_iterable validator for{iterable_identifier}"
+ " iterables of {member!r}>"
+ ).format(
+ iterable_identifier=iterable_identifier,
+ member=self.member_validator,
+ )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+ """
+ A validator that performs deep validation of an iterable.
+
+ :param member_validator: Validator to apply to iterable members
+ :param iterable_validator: Validator to apply to iterable itself
+ (optional)
+
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: if any sub-validators fail
+ """
+ return _DeepIterable(member_validator, iterable_validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepMapping(object):
+ key_validator = attrib(validator=is_callable())
+ value_validator = attrib(validator=is_callable())
+ mapping_validator = attrib(default=None, validator=optional(is_callable()))
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.mapping_validator is not None:
+ self.mapping_validator(inst, attr, value)
+
+ for key in value:
+ self.key_validator(inst, attr, key)
+ self.value_validator(inst, attr, value[key])
+
+ def __repr__(self):
+ return (
+ "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
+ ).format(key=self.key_validator, value=self.value_validator)
+
+
+def deep_mapping(key_validator, value_validator, mapping_validator=None):
+ """
+ A validator that performs deep validation of a dictionary.
+
+ :param key_validator: Validator to apply to dictionary keys
+ :param value_validator: Validator to apply to dictionary values
+ :param mapping_validator: Validator to apply to top-level mapping
+ attribute (optional)
+
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: if any sub-validators fail
+ """
+ return _DeepMapping(key_validator, value_validator, mapping_validator)
diff --git a/lib/spack/external/ctest_log_parser.py b/lib/spack/external/ctest_log_parser.py
index 61cc6df029..2b2746003a 100644
--- a/lib/spack/external/ctest_log_parser.py
+++ b/lib/spack/external/ctest_log_parser.py
@@ -150,8 +150,6 @@ _error_exceptions = [
": note",
" ok",
"Note:",
- "makefile:",
- "Makefile:",
":[ \\t]+Where:",
"[^ :]:[0-9]+: Warning",
"------ Build started: .* ------",
@@ -189,8 +187,6 @@ _warning_exceptions = [
"/usr/.*/X11/XResource\\.h:[0-9]+: war.*: ANSI C\\+\\+ forbids declaration",
"WARNING 84 :",
"WARNING 47 :",
- "makefile:",
- "Makefile:",
"warning: Clock skew detected. Your build may be incomplete.",
"/usr/openwin/include/GL/[^:]+:",
"bind_at_load",
diff --git a/lib/spack/external/distro.py b/lib/spack/external/distro.py
index e3805de75f..7892741347 100644
--- a/lib/spack/external/distro.py
+++ b/lib/spack/external/distro.py
@@ -1,4 +1,4 @@
-# Copyright 2015,2016 Nir Cohen
+# Copyright 2015,2016,2017 Nir Cohen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -17,32 +17,64 @@ The ``distro`` package (``distro`` stands for Linux Distribution) provides
information about the Linux distribution it runs on, such as a reliable
machine-readable distro ID, or version information.
-It is a renewed alternative implementation for Python's original
+It is the recommended replacement for Python's original
:py:func:`platform.linux_distribution` function, but it provides much more
functionality. An alternative implementation became necessary because Python
-3.5 deprecated this function, and Python 3.7 is expected to remove it
-altogether. Its predecessor function :py:func:`platform.dist` was already
-deprecated since Python 2.6 and is also expected to be removed in Python 3.7.
-Still, there are many cases in which access to Linux distribution information
-is needed. See `Python issue 1322 <https://bugs.python.org/issue1322>`_ for
-more information.
+3.5 deprecated this function, and Python 3.8 removed it altogether. Its
+predecessor function :py:func:`platform.dist` was already deprecated since
+Python 2.6 and removed in Python 3.8. Still, there are many cases in which
+access to OS distribution information is needed. See `Python issue 1322
+<https://bugs.python.org/issue1322>`_ for more information.
"""
+import argparse
+import json
+import logging
import os
import re
-import sys
-import json
import shlex
-import logging
-import argparse
import subprocess
+import sys
+import warnings
+
+__version__ = "1.6.0"
+
+# Use `if False` to avoid an ImportError on Python 2. After dropping Python 2
+# support, can use typing.TYPE_CHECKING instead. See:
+# https://docs.python.org/3/library/typing.html#typing.TYPE_CHECKING
+if False: # pragma: nocover
+ from typing import (
+ Any,
+ Callable,
+ Dict,
+ Iterable,
+ Optional,
+ Sequence,
+ TextIO,
+ Tuple,
+ Type,
+ TypedDict,
+ Union,
+ )
+
+ VersionDict = TypedDict(
+ "VersionDict", {"major": str, "minor": str, "build_number": str}
+ )
+ InfoDict = TypedDict(
+ "InfoDict",
+ {
+ "id": str,
+ "version": str,
+ "version_parts": VersionDict,
+ "like": str,
+ "codename": str,
+ },
+ )
-if not sys.platform.startswith('linux'):
- raise ImportError('Unsupported platform: {0}'.format(sys.platform))
-
-_UNIXCONFDIR = os.environ.get('UNIXCONFDIR', '/etc')
-_OS_RELEASE_BASENAME = 'os-release'
+_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc")
+_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib")
+_OS_RELEASE_BASENAME = "os-release"
#: Translation table for normalizing the "ID" attribute defined in os-release
#: files, for use by the :func:`distro.id` method.
@@ -51,7 +83,9 @@ _OS_RELEASE_BASENAME = 'os-release'
#: with blanks translated to underscores.
#:
#: * Value: Normalized value.
-NORMALIZED_OS_ID = {}
+NORMALIZED_OS_ID = {
+ "ol": "oracle", # Oracle Linux
+}
#: Translation table for normalizing the "Distributor ID" attribute returned by
#: the lsb_release command, for use by the :func:`distro.id` method.
@@ -61,10 +95,11 @@ NORMALIZED_OS_ID = {}
#:
#: * Value: Normalized value.
NORMALIZED_LSB_ID = {
- 'enterpriseenterprise': 'oracle', # Oracle Enterprise Linux
- 'redhatenterpriseworkstation': 'rhel', # RHEL 6, 7 Workstation
- 'redhatenterpriseserver': 'rhel', # RHEL 6, 7 Server
- 'redhatenterprisecomputenode': 'rhel', # RHEL 6 ComputeNode
+ "enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4
+ "enterpriseenterpriseserver": "oracle", # Oracle Linux 5
+ "redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation
+ "redhatenterpriseserver": "rhel", # RHEL 6, 7 Server
+ "redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode
}
#: Translation table for normalizing the distro ID derived from the file name
@@ -75,30 +110,40 @@ NORMALIZED_LSB_ID = {
#:
#: * Value: Normalized value.
NORMALIZED_DISTRO_ID = {
- 'redhat': 'rhel', # RHEL 6.x, 7.x
+ "redhat": "rhel", # RHEL 6.x, 7.x
}
# Pattern for content of distro release file (reversed)
_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
- r'(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)')
+ r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)"
+)
# Pattern for base file name of distro release file
-_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(
- r'(\w+)[-_](release|version)$')
+_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$")
# Base file names to be ignored when searching for distro release file
_DISTRO_RELEASE_IGNORE_BASENAMES = (
- 'debian_version',
- 'lsb-release',
- 'oem-release',
+ "debian_version",
+ "lsb-release",
+ "oem-release",
_OS_RELEASE_BASENAME,
- 'system-release'
+ "system-release",
+ "plesk-release",
+ "iredmail-release",
)
def linux_distribution(full_distribution_name=True):
+ # type: (bool) -> Tuple[str, str, str]
"""
- Return information about the current Linux distribution as a tuple
+ .. deprecated:: 1.6.0
+
+ :func:`distro.linux_distribution()` is deprecated. It should only be
+ used as a compatibility shim with Python's
+ :py:func:`platform.linux_distribution()`. Please use :func:`distro.id`,
+ :func:`distro.version` and :func:`distro.name` instead.
+
+ Return information about the current OS distribution as a tuple
``(id_name, version, codename)`` with items as follows:
* ``id_name``: If *full_distribution_name* is false, the result of
@@ -114,22 +159,30 @@ def linux_distribution(full_distribution_name=True):
The data it returns may not exactly be the same, because it uses more data
sources than the original function, and that may lead to different data if
- the Linux distribution is not consistent across multiple data sources it
+ the OS distribution is not consistent across multiple data sources it
provides (there are indeed such distributions ...).
Another reason for differences is the fact that the :func:`distro.id`
method normalizes the distro ID string to a reliable machine-readable value
- for a number of popular Linux distributions.
+ for a number of popular OS distributions.
"""
+ warnings.warn(
+ "distro.linux_distribution() is deprecated. It should only be used as a "
+ "compatibility shim with Python's platform.linux_distribution(). Please use "
+ "distro.id(), distro.version() and distro.name() instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
return _distro.linux_distribution(full_distribution_name)
def id():
+ # type: () -> str
"""
- Return the distro ID of the current Linux distribution, as a
+ Return the distro ID of the current distribution, as a
machine-readable string.
- For a number of Linux distributions, the returned distro ID value is
+ For a number of OS distributions, the returned distro ID value is
*reliable*, in the sense that it is documented and that it does not change
across releases of the distribution.
@@ -162,6 +215,10 @@ def id():
"scientific" Scientific Linux
"slackware" Slackware
"xenserver" XenServer
+ "openbsd" OpenBSD
+ "netbsd" NetBSD
+ "freebsd" FreeBSD
+ "midnightbsd" MidnightBSD
============== =========================================
If you have a need to get distros for reliable IDs added into this set,
@@ -191,7 +248,7 @@ def id():
* a normalization of the ID is performed, based upon
`normalization tables`_. The purpose of this normalization is to ensure
that the ID is as reliable as possible, even across incompatible changes
- in the Linux distributions. A common reason for an incompatible change is
+ in the OS distributions. A common reason for an incompatible change is
the addition of an os-release file, or the addition of the lsb_release
command, with ID values that differ from what was previously determined
from the distro release file name.
@@ -200,8 +257,9 @@ def id():
def name(pretty=False):
+ # type: (bool) -> str
"""
- Return the name of the current Linux distribution, as a human-readable
+ Return the name of the current OS distribution, as a human-readable
string.
If *pretty* is false, the name is returned without version or codename.
@@ -239,8 +297,9 @@ def name(pretty=False):
def version(pretty=False, best=False):
+ # type: (bool, bool) -> str
"""
- Return the version of the current Linux distribution, as a human-readable
+ Return the version of the current OS distribution, as a human-readable
string.
If *pretty* is false, the version is returned without codename (e.g.
@@ -283,8 +342,9 @@ def version(pretty=False, best=False):
def version_parts(best=False):
+ # type: (bool) -> Tuple[str, str, str]
"""
- Return the version of the current Linux distribution as a tuple
+ Return the version of the current OS distribution as a tuple
``(major, minor, build_number)`` with items as follows:
* ``major``: The result of :func:`distro.major_version`.
@@ -300,8 +360,9 @@ def version_parts(best=False):
def major_version(best=False):
+ # type: (bool) -> str
"""
- Return the major version of the current Linux distribution, as a string,
+ Return the major version of the current OS distribution, as a string,
if provided.
Otherwise, the empty string is returned. The major version is the first
part of the dot-separated version string.
@@ -313,8 +374,9 @@ def major_version(best=False):
def minor_version(best=False):
+ # type: (bool) -> str
"""
- Return the minor version of the current Linux distribution, as a string,
+ Return the minor version of the current OS distribution, as a string,
if provided.
Otherwise, the empty string is returned. The minor version is the second
part of the dot-separated version string.
@@ -326,8 +388,9 @@ def minor_version(best=False):
def build_number(best=False):
+ # type: (bool) -> str
"""
- Return the build number of the current Linux distribution, as a string,
+ Return the build number of the current OS distribution, as a string,
if provided.
Otherwise, the empty string is returned. The build number is the third part
of the dot-separated version string.
@@ -339,9 +402,10 @@ def build_number(best=False):
def like():
+ # type: () -> str
"""
Return a space-separated list of distro IDs of distributions that are
- closely related to the current Linux distribution in regards to packaging
+ closely related to the current OS distribution in regards to packaging
and programming interfaces, for example distributions the current
distribution is a derivative from.
@@ -356,8 +420,9 @@ def like():
def codename():
+ # type: () -> str
"""
- Return the codename for the release of the current Linux distribution,
+ Return the codename for the release of the current OS distribution,
as a string.
If the distribution does not have a codename, an empty string is returned.
@@ -380,8 +445,9 @@ def codename():
def info(pretty=False, best=False):
+ # type: (bool, bool) -> InfoDict
"""
- Return certain machine-readable information items about the current Linux
+ Return certain machine-readable information items about the current OS
distribution in a dictionary, as shown in the following example:
.. sourcecode:: python
@@ -424,9 +490,10 @@ def info(pretty=False, best=False):
def os_release_info():
+ # type: () -> Dict[str, str]
"""
Return a dictionary containing key-value pairs for the information items
- from the os-release file data source of the current Linux distribution.
+ from the os-release file data source of the current OS distribution.
See `os-release file`_ for details about these information items.
"""
@@ -434,9 +501,10 @@ def os_release_info():
def lsb_release_info():
+ # type: () -> Dict[str, str]
"""
Return a dictionary containing key-value pairs for the information items
- from the lsb_release command data source of the current Linux distribution.
+ from the lsb_release command data source of the current OS distribution.
See `lsb_release command output`_ for details about these information
items.
@@ -445,19 +513,30 @@ def lsb_release_info():
def distro_release_info():
+ # type: () -> Dict[str, str]
"""
Return a dictionary containing key-value pairs for the information items
- from the distro release file data source of the current Linux distribution.
+ from the distro release file data source of the current OS distribution.
See `distro release file`_ for details about these information items.
"""
return _distro.distro_release_info()
+def uname_info():
+ # type: () -> Dict[str, str]
+ """
+ Return a dictionary containing key-value pairs for the information items
+ from the distro release file data source of the current OS distribution.
+ """
+ return _distro.uname_info()
+
+
def os_release_attr(attribute):
+ # type: (str) -> str
"""
Return a single named information item from the os-release file data source
- of the current Linux distribution.
+ of the current OS distribution.
Parameters:
@@ -474,9 +553,10 @@ def os_release_attr(attribute):
def lsb_release_attr(attribute):
+ # type: (str) -> str
"""
Return a single named information item from the lsb_release command output
- data source of the current Linux distribution.
+ data source of the current OS distribution.
Parameters:
@@ -494,9 +574,10 @@ def lsb_release_attr(attribute):
def distro_release_attr(attribute):
+ # type: (str) -> str
"""
Return a single named information item from the distro release file
- data source of the current Linux distribution.
+ data source of the current OS distribution.
Parameters:
@@ -512,15 +593,55 @@ def distro_release_attr(attribute):
return _distro.distro_release_attr(attribute)
+def uname_attr(attribute):
+ # type: (str) -> str
+ """
+    Return a single named information item from the uname command output
+    data source of the current OS distribution.
+
+ Parameters:
+
+ * ``attribute`` (string): Key of the information item.
+
+ Returns:
+
+ * (string): Value of the information item, if the item exists.
+ The empty string, if the item does not exist.
+ """
+ return _distro.uname_attr(attribute)
+
+
+try:
+ from functools import cached_property
+except ImportError:
+ # Python < 3.8
+ class cached_property(object): # type: ignore
+ """A version of @property which caches the value. On access, it calls the
+ underlying function and sets the value in `__dict__` so future accesses
+ will not re-call the property.
+ """
+
+ def __init__(self, f):
+ # type: (Callable[[Any], Any]) -> None
+ self._fname = f.__name__
+ self._f = f
+
+ def __get__(self, obj, owner):
+ # type: (Any, Type[Any]) -> Any
+ assert obj is not None, "call {} on an instance".format(self._fname)
+ ret = obj.__dict__[self._fname] = self._f(obj)
+ return ret
+
+
class LinuxDistribution(object):
"""
- Provides information about a Linux distribution.
+    Provides information about an OS distribution.
This package creates a private module-global instance of this class with
default initialization arguments, that is used by the
`consolidated accessor functions`_ and `single source accessor functions`_.
By using default initialization arguments, that module-global instance
- returns data about the current Linux distribution (i.e. the distro this
+ returns data about the current OS distribution (i.e. the distro this
package runs on).
Normally, it is not necessary to create additional instances of this class.
@@ -530,10 +651,15 @@ class LinuxDistribution(object):
lsb_release command.
"""
- def __init__(self,
- include_lsb=True,
- os_release_file='',
- distro_release_file=''):
+ def __init__(
+ self,
+ include_lsb=True,
+ os_release_file="",
+ distro_release_file="",
+ include_uname=True,
+ root_dir=None,
+ ):
+ # type: (bool, str, str, bool, Optional[str]) -> None
"""
The initialization method of this class gathers information from the
available data sources, and stores that in private instance attributes.
@@ -567,6 +693,14 @@ class LinuxDistribution(object):
distro release file can be found, the data source for the distro
release file will be empty.
+ * ``include_uname`` (bool): Controls whether uname command output is
+ included as a data source. If the uname command is not available in
+ the program execution path the data source for the uname command will
+ be empty.
+
+ * ``root_dir`` (string): The absolute path to the root directory to use
+ to find distro-related information files.
+
Public instance attributes:
* ``os_release_file`` (string): The path name of the
@@ -577,6 +711,13 @@ class LinuxDistribution(object):
`distro release file`_ that is actually used as a data source. The
empty string if no distro release file is used as a data source.
+ * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter.
+ This controls whether the lsb information will be loaded.
+
+ * ``include_uname`` (bool): The result of the ``include_uname``
+ parameter. This controls whether the uname information will
+ be loaded.
+
Raises:
* :py:exc:`IOError`: Some I/O issue with an os-release file or distro
@@ -589,33 +730,52 @@ class LinuxDistribution(object):
* :py:exc:`UnicodeError`: A data source has unexpected characters or
uses an unexpected encoding.
"""
- self.os_release_file = os_release_file or \
- os.path.join(_UNIXCONFDIR, _OS_RELEASE_BASENAME)
- self.distro_release_file = distro_release_file or '' # updated later
- self._os_release_info = self._get_os_release_info()
- self._lsb_release_info = self._get_lsb_release_info() \
- if include_lsb else {}
- self._distro_release_info = self._get_distro_release_info()
+ self.root_dir = root_dir
+ self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR
+ self.usr_lib_dir = (
+ os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR
+ )
+
+ if os_release_file:
+ self.os_release_file = os_release_file
+ else:
+ etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME)
+ usr_lib_os_release_file = os.path.join(
+ self.usr_lib_dir, _OS_RELEASE_BASENAME
+ )
+
+ # NOTE: The idea is to respect order **and** have it set
+ # at all times for API backwards compatibility.
+ if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile(
+ usr_lib_os_release_file
+ ):
+ self.os_release_file = etc_dir_os_release_file
+ else:
+ self.os_release_file = usr_lib_os_release_file
+
+ self.distro_release_file = distro_release_file or "" # updated later
+ self.include_lsb = include_lsb
+ self.include_uname = include_uname
def __repr__(self):
- """Return repr of all info
- """
- return \
- "LinuxDistribution(" \
- "os_release_file={0!r}, " \
- "distro_release_file={1!r}, " \
- "_os_release_info={2!r}, " \
- "_lsb_release_info={3!r}, " \
- "_distro_release_info={4!r})".format(
- self.os_release_file,
- self.distro_release_file,
- self._os_release_info,
- self._lsb_release_info,
- self._distro_release_info)
+ # type: () -> str
+ """Return repr of all info"""
+ return (
+ "LinuxDistribution("
+ "os_release_file={self.os_release_file!r}, "
+ "distro_release_file={self.distro_release_file!r}, "
+ "include_lsb={self.include_lsb!r}, "
+ "include_uname={self.include_uname!r}, "
+ "_os_release_info={self._os_release_info!r}, "
+ "_lsb_release_info={self._lsb_release_info!r}, "
+ "_distro_release_info={self._distro_release_info!r}, "
+ "_uname_info={self._uname_info!r})".format(self=self)
+ )
def linux_distribution(self, full_distribution_name=True):
+ # type: (bool) -> Tuple[str, str, str]
"""
- Return information about the Linux distribution that is compatible
+ Return information about the OS distribution that is compatible
with Python's :func:`platform.linux_distribution`, supporting a subset
of its parameters.
@@ -624,101 +784,119 @@ class LinuxDistribution(object):
return (
self.name() if full_distribution_name else self.id(),
self.version(),
- self.codename()
+ self.codename(),
)
def id(self):
- """Return the distro ID of the Linux distribution, as a string.
+ # type: () -> str
+ """Return the distro ID of the OS distribution, as a string.
For details, see :func:`distro.id`.
"""
+
def normalize(distro_id, table):
- distro_id = distro_id.lower().replace(' ', '_')
+ # type: (str, Dict[str, str]) -> str
+ distro_id = distro_id.lower().replace(" ", "_")
return table.get(distro_id, distro_id)
- distro_id = self.os_release_attr('id')
+ distro_id = self.os_release_attr("id")
if distro_id:
return normalize(distro_id, NORMALIZED_OS_ID)
- distro_id = self.lsb_release_attr('distributor_id')
+ distro_id = self.lsb_release_attr("distributor_id")
if distro_id:
return normalize(distro_id, NORMALIZED_LSB_ID)
- distro_id = self.distro_release_attr('id')
+ distro_id = self.distro_release_attr("id")
if distro_id:
return normalize(distro_id, NORMALIZED_DISTRO_ID)
- return ''
+ distro_id = self.uname_attr("id")
+ if distro_id:
+ return normalize(distro_id, NORMALIZED_DISTRO_ID)
+
+ return ""
def name(self, pretty=False):
+ # type: (bool) -> str
"""
- Return the name of the Linux distribution, as a string.
+ Return the name of the OS distribution, as a string.
For details, see :func:`distro.name`.
"""
- name = self.os_release_attr('name') \
- or self.lsb_release_attr('distributor_id') \
- or self.distro_release_attr('name')
+ name = (
+ self.os_release_attr("name")
+ or self.lsb_release_attr("distributor_id")
+ or self.distro_release_attr("name")
+ or self.uname_attr("name")
+ )
if pretty:
- name = self.os_release_attr('pretty_name') \
- or self.lsb_release_attr('description')
+ name = self.os_release_attr("pretty_name") or self.lsb_release_attr(
+ "description"
+ )
if not name:
- name = self.distro_release_attr('name')
+ name = self.distro_release_attr("name") or self.uname_attr("name")
version = self.version(pretty=True)
if version:
- name = name + ' ' + version
- return name or ''
+ name = name + " " + version
+ return name or ""
def version(self, pretty=False, best=False):
+ # type: (bool, bool) -> str
"""
- Return the version of the Linux distribution, as a string.
+ Return the version of the OS distribution, as a string.
For details, see :func:`distro.version`.
"""
versions = [
- self.os_release_attr('version_id'),
- self.lsb_release_attr('release'),
- self.distro_release_attr('version_id'),
- self._parse_distro_release_content(
- self.os_release_attr('pretty_name')).get('version_id', ''),
+ self.os_release_attr("version_id"),
+ self.lsb_release_attr("release"),
+ self.distro_release_attr("version_id"),
+ self._parse_distro_release_content(self.os_release_attr("pretty_name")).get(
+ "version_id", ""
+ ),
self._parse_distro_release_content(
- self.lsb_release_attr('description')).get('version_id', '')
+ self.lsb_release_attr("description")
+ ).get("version_id", ""),
+ self.uname_attr("release"),
]
- version = ''
+ version = ""
if best:
# This algorithm uses the last version in priority order that has
# the best precision. If the versions are not in conflict, that
# does not matter; otherwise, using the last one instead of the
# first one might be considered a surprise.
for v in versions:
- if v.count(".") > version.count(".") or version == '':
+ if v.count(".") > version.count(".") or version == "":
version = v
else:
for v in versions:
- if v != '':
+ if v != "":
version = v
break
if pretty and version and self.codename():
- version = u'{0} ({1})'.format(version, self.codename())
+ version = "{0} ({1})".format(version, self.codename())
return version
def version_parts(self, best=False):
+ # type: (bool) -> Tuple[str, str, str]
"""
- Return the version of the Linux distribution, as a tuple of version
+ Return the version of the OS distribution, as a tuple of version
numbers.
For details, see :func:`distro.version_parts`.
"""
version_str = self.version(best=best)
if version_str:
- version_regex = re.compile(r'(\d+)\.?(\d+)?\.?(\d+)?')
+ version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?")
matches = version_regex.match(version_str)
if matches:
major, minor, build_number = matches.groups()
- return major, minor or '', build_number or ''
- return '', '', ''
+ return major, minor or "", build_number or ""
+ return "", "", ""
def major_version(self, best=False):
+ # type: (bool) -> str
"""
Return the major version number of the current distribution.
@@ -727,43 +905,54 @@ class LinuxDistribution(object):
return self.version_parts(best)[0]
def minor_version(self, best=False):
+ # type: (bool) -> str
"""
- Return the minor version number of the Linux distribution.
+ Return the minor version number of the current distribution.
For details, see :func:`distro.minor_version`.
"""
return self.version_parts(best)[1]
def build_number(self, best=False):
+ # type: (bool) -> str
"""
- Return the build number of the Linux distribution.
+ Return the build number of the current distribution.
For details, see :func:`distro.build_number`.
"""
return self.version_parts(best)[2]
def like(self):
+ # type: () -> str
"""
- Return the IDs of distributions that are like the Linux distribution.
+ Return the IDs of distributions that are like the OS distribution.
For details, see :func:`distro.like`.
"""
- return self.os_release_attr('id_like') or ''
+ return self.os_release_attr("id_like") or ""
def codename(self):
+ # type: () -> str
"""
- Return the codename of the Linux distribution.
+ Return the codename of the OS distribution.
For details, see :func:`distro.codename`.
"""
- return self.os_release_attr('codename') \
- or self.lsb_release_attr('codename') \
- or self.distro_release_attr('codename') \
- or ''
+ try:
+ # Handle os_release specially since distros might purposefully set
+ # this to empty string to have no codename
+ return self._os_release_info["codename"]
+ except KeyError:
+ return (
+ self.lsb_release_attr("codename")
+ or self.distro_release_attr("codename")
+ or ""
+ )
def info(self, pretty=False, best=False):
+ # type: (bool, bool) -> InfoDict
"""
- Return certain machine-readable information about the Linux
+ Return certain machine-readable information about the OS
distribution.
For details, see :func:`distro.info`.
@@ -774,25 +963,27 @@ class LinuxDistribution(object):
version_parts=dict(
major=self.major_version(best),
minor=self.minor_version(best),
- build_number=self.build_number(best)
+ build_number=self.build_number(best),
),
like=self.like(),
codename=self.codename(),
)
def os_release_info(self):
+ # type: () -> Dict[str, str]
"""
Return a dictionary containing key-value pairs for the information
- items from the os-release file data source of the Linux distribution.
+ items from the os-release file data source of the OS distribution.
For details, see :func:`distro.os_release_info`.
"""
return self._os_release_info
def lsb_release_info(self):
+ # type: () -> Dict[str, str]
"""
Return a dictionary containing key-value pairs for the information
- items from the lsb_release command data source of the Linux
+ items from the lsb_release command data source of the OS
distribution.
For details, see :func:`distro.lsb_release_info`.
@@ -800,43 +991,69 @@ class LinuxDistribution(object):
return self._lsb_release_info
def distro_release_info(self):
+ # type: () -> Dict[str, str]
"""
Return a dictionary containing key-value pairs for the information
- items from the distro release file data source of the Linux
+ items from the distro release file data source of the OS
distribution.
For details, see :func:`distro.distro_release_info`.
"""
return self._distro_release_info
+ def uname_info(self):
+ # type: () -> Dict[str, str]
+ """
+ Return a dictionary containing key-value pairs for the information
+ items from the uname command data source of the OS distribution.
+
+ For details, see :func:`distro.uname_info`.
+ """
+ return self._uname_info
+
def os_release_attr(self, attribute):
+ # type: (str) -> str
"""
Return a single named information item from the os-release file data
- source of the Linux distribution.
+ source of the OS distribution.
For details, see :func:`distro.os_release_attr`.
"""
- return self._os_release_info.get(attribute, '')
+ return self._os_release_info.get(attribute, "")
def lsb_release_attr(self, attribute):
+ # type: (str) -> str
"""
Return a single named information item from the lsb_release command
- output data source of the Linux distribution.
+ output data source of the OS distribution.
For details, see :func:`distro.lsb_release_attr`.
"""
- return self._lsb_release_info.get(attribute, '')
+ return self._lsb_release_info.get(attribute, "")
def distro_release_attr(self, attribute):
+ # type: (str) -> str
"""
Return a single named information item from the distro release file
- data source of the Linux distribution.
+ data source of the OS distribution.
For details, see :func:`distro.distro_release_attr`.
"""
- return self._distro_release_info.get(attribute, '')
+ return self._distro_release_info.get(attribute, "")
- def _get_os_release_info(self):
+ def uname_attr(self, attribute):
+ # type: (str) -> str
+ """
+ Return a single named information item from the uname command
+ output data source of the OS distribution.
+
+ For details, see :func:`distro.uname_attr`.
+ """
+ return self._uname_info.get(attribute, "")
+
+ @cached_property
+ def _os_release_info(self):
+ # type: () -> Dict[str, str]
"""
Get the information items from the specified os-release file.
@@ -850,6 +1067,7 @@ class LinuxDistribution(object):
@staticmethod
def _parse_os_release_content(lines):
+ # type: (TextIO) -> Dict[str, str]
"""
Parse the lines of an os-release file.
@@ -874,7 +1092,7 @@ class LinuxDistribution(object):
# parsed content is a unicode object. The following fix resolves that
# (... but it should be fixed in shlex...):
if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
- lexer.wordchars = lexer.wordchars.decode('iso-8859-1')
+ lexer.wordchars = lexer.wordchars.decode("iso-8859-1")
tokens = list(lexer)
for token in tokens:
@@ -884,61 +1102,59 @@ class LinuxDistribution(object):
# stripped, etc.), so the tokens are now either:
# * variable assignments: var=value
# * commands or their arguments (not allowed in os-release)
- if '=' in token:
- k, v = token.split('=', 1)
- if isinstance(v, bytes):
- v = v.decode('utf-8')
+ if "=" in token:
+ k, v = token.split("=", 1)
props[k.lower()] = v
- if k == 'VERSION':
- # this handles cases in which the codename is in
- # the `(CODENAME)` (rhel, centos, fedora) format
- # or in the `, CODENAME` format (Ubuntu).
- codename = re.search(r'(\(\D+\))|,(\s+)?\D+', v)
- if codename:
- codename = codename.group()
- codename = codename.strip('()')
- codename = codename.strip(',')
- codename = codename.strip()
- # codename appears within paranthese.
- props['codename'] = codename
- else:
- props['codename'] = ''
else:
# Ignore any tokens that are not variable assignments
pass
+
+ if "version_codename" in props:
+ # os-release added a version_codename field. Use that in
+            # preference to anything else. Note that some distros purposefully
+ # do not have code names. They should be setting
+ # version_codename=""
+ props["codename"] = props["version_codename"]
+ elif "ubuntu_codename" in props:
+ # Same as above but a non-standard field name used on older Ubuntus
+ props["codename"] = props["ubuntu_codename"]
+ elif "version" in props:
+ # If there is no version_codename, parse it from the version
+ match = re.search(r"(\(\D+\))|,(\s+)?\D+", props["version"])
+ if match:
+ codename = match.group()
+ codename = codename.strip("()")
+ codename = codename.strip(",")
+ codename = codename.strip()
+            # codename appears within parentheses.
+ props["codename"] = codename
+
return props
- def _get_lsb_release_info(self):
+ @cached_property
+ def _lsb_release_info(self):
+ # type: () -> Dict[str, str]
"""
Get the information items from the lsb_release command output.
Returns:
A dictionary containing all information items.
"""
- cmd = 'lsb_release -a'
- process = subprocess.Popen(
- cmd,
- shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- stdout, stderr = process.communicate()
- stdout, stderr = stdout.decode('utf-8'), stderr.decode('utf-8')
- code = process.returncode
- if code == 0:
- content = stdout.splitlines()
- return self._parse_lsb_release_content(content)
- elif code == 127: # Command not found
+ if not self.include_lsb:
return {}
- else:
- if sys.version_info[:2] >= (3, 5):
- raise subprocess.CalledProcessError(code, cmd, stdout, stderr)
- elif sys.version_info[:2] >= (2, 7):
- raise subprocess.CalledProcessError(code, cmd, stdout)
- elif sys.version_info[:2] == (2, 6):
- raise subprocess.CalledProcessError(code, cmd)
+ with open(os.devnull, "wb") as devnull:
+ try:
+ cmd = ("lsb_release", "-a")
+ stdout = subprocess.check_output(cmd, stderr=devnull)
+ # Command not found or lsb_release returned error
+ except (OSError, subprocess.CalledProcessError):
+ return {}
+ content = self._to_str(stdout).splitlines()
+ return self._parse_lsb_release_content(content)
@staticmethod
def _parse_lsb_release_content(lines):
+ # type: (Iterable[str]) -> Dict[str, str]
"""
Parse the output of the lsb_release command.
@@ -953,16 +1169,62 @@ class LinuxDistribution(object):
"""
props = {}
for line in lines:
- line = line.decode('utf-8') if isinstance(line, bytes) else line
- kv = line.strip('\n').split(':', 1)
+ kv = line.strip("\n").split(":", 1)
if len(kv) != 2:
# Ignore lines without colon.
continue
k, v = kv
- props.update({k.replace(' ', '_').lower(): v.strip()})
+ props.update({k.replace(" ", "_").lower(): v.strip()})
+ return props
+
+ @cached_property
+ def _uname_info(self):
+ # type: () -> Dict[str, str]
+ with open(os.devnull, "wb") as devnull:
+ try:
+ cmd = ("uname", "-rs")
+ stdout = subprocess.check_output(cmd, stderr=devnull)
+ except OSError:
+ return {}
+ content = self._to_str(stdout).splitlines()
+ return self._parse_uname_content(content)
+
+ @staticmethod
+ def _parse_uname_content(lines):
+ # type: (Sequence[str]) -> Dict[str, str]
+ props = {}
+ match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip())
+ if match:
+ name, version = match.groups()
+
+ # This is to prevent the Linux kernel version from
+ # appearing as the 'best' version on otherwise
+ # identifiable distributions.
+ if name == "Linux":
+ return {}
+ props["id"] = name.lower()
+ props["name"] = name
+ props["release"] = version
return props
- def _get_distro_release_info(self):
+ @staticmethod
+ def _to_str(text):
+ # type: (Union[bytes, str]) -> str
+ encoding = sys.getfilesystemencoding()
+ encoding = "utf-8" if encoding == "ascii" else encoding
+
+ if sys.version_info[0] >= 3:
+ if isinstance(text, bytes):
+ return text.decode(encoding)
+ else:
+ if isinstance(text, unicode): # noqa
+ return text.encode(encoding)
+
+ return text
+
+ @cached_property
+ def _distro_release_info(self):
+ # type: () -> Dict[str, str]
"""
Get the information items from the specified distro release file.
@@ -972,20 +1234,21 @@ class LinuxDistribution(object):
if self.distro_release_file:
# If it was specified, we use it and parse what we can, even if
# its file name or content does not match the expected pattern.
- distro_info = self._parse_distro_release_file(
- self.distro_release_file)
+ distro_info = self._parse_distro_release_file(self.distro_release_file)
basename = os.path.basename(self.distro_release_file)
# The file name pattern for user-specified distro release files
# is somewhat more tolerant (compared to when searching for the
# file), because we want to use what was specified as best as
# possible.
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
- if match:
- distro_info['id'] = match.group(1)
+ if "name" in distro_info and "cloudlinux" in distro_info["name"].lower():
+ distro_info["id"] = "cloudlinux"
+ elif match:
+ distro_info["id"] = match.group(1)
return distro_info
else:
try:
- basenames = os.listdir(_UNIXCONFDIR)
+ basenames = os.listdir(self.etc_dir)
# We sort for repeatability in cases where there are multiple
# distro specific files; e.g. CentOS, Oracle, Enterprise all
# containing `redhat-release` on top of their own.
@@ -995,33 +1258,41 @@ class LinuxDistribution(object):
# sure about the *-release files. Check common entries of
# /etc for information. If they turn out to not be there the
# error is handled in `_parse_distro_release_file()`.
- basenames = ['SuSE-release',
- 'arch-release',
- 'base-release',
- 'centos-release',
- 'fedora-release',
- 'gentoo-release',
- 'mageia-release',
- 'manjaro-release',
- 'oracle-release',
- 'redhat-release',
- 'sl-release',
- 'slackware-version']
+ basenames = [
+ "SuSE-release",
+ "arch-release",
+ "base-release",
+ "centos-release",
+ "fedora-release",
+ "gentoo-release",
+ "mageia-release",
+ "mandrake-release",
+ "mandriva-release",
+ "mandrivalinux-release",
+ "manjaro-release",
+ "oracle-release",
+ "redhat-release",
+ "sl-release",
+ "slackware-version",
+ ]
for basename in basenames:
if basename in _DISTRO_RELEASE_IGNORE_BASENAMES:
continue
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
if match:
- filepath = os.path.join(_UNIXCONFDIR, basename)
+ filepath = os.path.join(self.etc_dir, basename)
distro_info = self._parse_distro_release_file(filepath)
- if 'name' in distro_info:
+ if "name" in distro_info:
# The name is always present if the pattern matches
self.distro_release_file = filepath
- distro_info['id'] = match.group(1)
+ distro_info["id"] = match.group(1)
+ if "cloudlinux" in distro_info["name"].lower():
+ distro_info["id"] = "cloudlinux"
return distro_info
return {}
def _parse_distro_release_file(self, filepath):
+ # type: (str) -> Dict[str, str]
"""
Parse a distro release file.
@@ -1040,11 +1311,12 @@ class LinuxDistribution(object):
except (OSError, IOError):
# Ignore not being able to read a specific, seemingly version
# related file.
- # See https://github.com/nir0s/distro/issues/162
+ # See https://github.com/python-distro/distro/issues/162
return {}
@staticmethod
def _parse_distro_release_content(line):
+ # type: (str) -> Dict[str, str]
"""
Parse a line from a distro release file.
@@ -1055,20 +1327,17 @@ class LinuxDistribution(object):
Returns:
A dictionary containing all information items.
"""
- if isinstance(line, bytes):
- line = line.decode('utf-8')
- matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(
- line.strip()[::-1])
+ matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1])
distro_info = {}
if matches:
# regexp ensures non-None
- distro_info['name'] = matches.group(3)[::-1]
+ distro_info["name"] = matches.group(3)[::-1]
if matches.group(2):
- distro_info['version_id'] = matches.group(2)[::-1]
+ distro_info["version_id"] = matches.group(2)[::-1]
if matches.group(1):
- distro_info['codename'] = matches.group(1)[::-1]
+ distro_info["codename"] = matches.group(1)[::-1]
elif line:
- distro_info['name'] = line.strip()
+ distro_info["name"] = line.strip()
return distro_info
@@ -1076,27 +1345,42 @@ _distro = LinuxDistribution()
def main():
+ # type: () -> None
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(sys.stdout))
- parser = argparse.ArgumentParser(description="Linux distro info tool")
+ parser = argparse.ArgumentParser(description="OS distro info tool")
parser.add_argument(
- '--json',
- '-j',
- help="Output in machine readable format",
- action="store_true")
+ "--json", "-j", help="Output in machine readable format", action="store_true"
+ )
+
+ parser.add_argument(
+ "--root-dir",
+ "-r",
+ type=str,
+ dest="root_dir",
+ help="Path to the root filesystem directory (defaults to /)",
+ )
+
args = parser.parse_args()
+ if args.root_dir:
+ dist = LinuxDistribution(
+ include_lsb=False, include_uname=False, root_dir=args.root_dir
+ )
+ else:
+ dist = _distro
+
if args.json:
- logger.info(json.dumps(info(), indent=4, sort_keys=True))
+ logger.info(json.dumps(dist.info(), indent=4, sort_keys=True))
else:
- logger.info('Name: %s', name(pretty=True))
- distribution_version = version(pretty=True)
- logger.info('Version: %s', distribution_version)
- distribution_codename = codename()
- logger.info('Codename: %s', distribution_codename)
+ logger.info("Name: %s", dist.name(pretty=True))
+ distribution_version = dist.version(pretty=True)
+ logger.info("Version: %s", distribution_version)
+ distribution_codename = dist.codename()
+ logger.info("Codename: %s", distribution_codename)
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/lib/spack/external/functools_backport.py b/lib/spack/external/functools_backport.py
deleted file mode 100644
index b3c913ffd7..0000000000
--- a/lib/spack/external/functools_backport.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#
-# Backport of Python 2.7's total_ordering.
-#
-
-def total_ordering(cls):
- """Class decorator that fills in missing ordering methods"""
- convert = {
- '__lt__': [('__gt__', lambda self, other: not (self < other or self == other)),
- ('__le__', lambda self, other: self < other or self == other),
- ('__ge__', lambda self, other: not self < other)],
- '__le__': [('__ge__', lambda self, other: not self <= other or self == other),
- ('__lt__', lambda self, other: self <= other and not self == other),
- ('__gt__', lambda self, other: not self <= other)],
- '__gt__': [('__lt__', lambda self, other: not (self > other or self == other)),
- ('__ge__', lambda self, other: self > other or self == other),
- ('__le__', lambda self, other: not self > other)],
- '__ge__': [('__le__', lambda self, other: (not self >= other) or self == other),
- ('__gt__', lambda self, other: self >= other and not self == other),
- ('__lt__', lambda self, other: not self >= other)]
- }
- roots = set(dir(cls)) & set(convert)
- if not roots:
- raise ValueError('must define at least one ordering operation: < > <= >=')
- root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__
- for opname, opfunc in convert[root]:
- if opname not in roots:
- opfunc.__name__ = opname
- opfunc.__doc__ = getattr(int, opname).__doc__
- setattr(cls, opname, opfunc)
- return cls
-
-
-@total_ordering
-class reverse_order(object):
- """Helper for creating key functions.
-
- This is a wrapper that inverts the sense of the natural
- comparisons on the object.
- """
- def __init__(self, value):
- self.value = value
-
- def __eq__(self, other):
- return other.value == self.value
-
- def __lt__(self, other):
- return other.value < self.value
diff --git a/lib/spack/external/jinja2/LICENSE.rst b/lib/spack/external/jinja2/LICENSE.rst
new file mode 100644
index 0000000000..c37cae49ec
--- /dev/null
+++ b/lib/spack/external/jinja2/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2007 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/lib/spack/external/jinja2/__init__.py b/lib/spack/external/jinja2/__init__.py
index 42aa763d57..f17866f6c4 100644
--- a/lib/spack/external/jinja2/__init__.py
+++ b/lib/spack/external/jinja2/__init__.py
@@ -1,83 +1,44 @@
# -*- coding: utf-8 -*-
+"""Jinja is a template engine written in pure Python. It provides a
+non-XML syntax that supports inline expressions and an optional
+sandboxed environment.
"""
- jinja2
- ~~~~~~
-
- Jinja2 is a template engine written in pure Python. It provides a
- Django inspired non-XML syntax but supports inline expressions and
- an optional sandboxed environment.
-
- Nutshell
- --------
-
- Here a small example of a Jinja2 template::
-
- {% extends 'base.html' %}
- {% block title %}Memberlist{% endblock %}
- {% block content %}
- <ul>
- {% for user in users %}
- <li><a href="{{ user.url }}">{{ user.username }}</a></li>
- {% endfor %}
- </ul>
- {% endblock %}
-
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
-"""
-__docformat__ = 'restructuredtext en'
-__version__ = '2.10'
-
-# high level interface
-from jinja2.environment import Environment, Template
-
-# loaders
-from jinja2.loaders import BaseLoader, FileSystemLoader, PackageLoader, \
- DictLoader, FunctionLoader, PrefixLoader, ChoiceLoader, \
- ModuleLoader
-
-# bytecode caches
-from jinja2.bccache import BytecodeCache, FileSystemBytecodeCache, \
- MemcachedBytecodeCache
-
-# undefined types
-from jinja2.runtime import Undefined, DebugUndefined, StrictUndefined, \
- make_logging_undefined
-
-# exceptions
-from jinja2.exceptions import TemplateError, UndefinedError, \
- TemplateNotFound, TemplatesNotFound, TemplateSyntaxError, \
- TemplateAssertionError, TemplateRuntimeError
-
-# decorators and public utilities
-from jinja2.filters import environmentfilter, contextfilter, \
- evalcontextfilter
-from jinja2.utils import Markup, escape, clear_caches, \
- environmentfunction, evalcontextfunction, contextfunction, \
- is_undefined, select_autoescape
-
-__all__ = [
- 'Environment', 'Template', 'BaseLoader', 'FileSystemLoader',
- 'PackageLoader', 'DictLoader', 'FunctionLoader', 'PrefixLoader',
- 'ChoiceLoader', 'BytecodeCache', 'FileSystemBytecodeCache',
- 'MemcachedBytecodeCache', 'Undefined', 'DebugUndefined',
- 'StrictUndefined', 'TemplateError', 'UndefinedError', 'TemplateNotFound',
- 'TemplatesNotFound', 'TemplateSyntaxError', 'TemplateAssertionError',
- 'TemplateRuntimeError',
- 'ModuleLoader', 'environmentfilter', 'contextfilter', 'Markup', 'escape',
- 'environmentfunction', 'contextfunction', 'clear_caches', 'is_undefined',
- 'evalcontextfilter', 'evalcontextfunction', 'make_logging_undefined',
- 'select_autoescape',
-]
-
-
-def _patch_async():
- from jinja2.utils import have_async_gen
- if have_async_gen:
- from jinja2.asyncsupport import patch_all
- patch_all()
-
-
-_patch_async()
-del _patch_async
+from markupsafe import escape
+from markupsafe import Markup
+
+from .bccache import BytecodeCache
+from .bccache import FileSystemBytecodeCache
+from .bccache import MemcachedBytecodeCache
+from .environment import Environment
+from .environment import Template
+from .exceptions import TemplateAssertionError
+from .exceptions import TemplateError
+from .exceptions import TemplateNotFound
+from .exceptions import TemplateRuntimeError
+from .exceptions import TemplatesNotFound
+from .exceptions import TemplateSyntaxError
+from .exceptions import UndefinedError
+from .filters import contextfilter
+from .filters import environmentfilter
+from .filters import evalcontextfilter
+from .loaders import BaseLoader
+from .loaders import ChoiceLoader
+from .loaders import DictLoader
+from .loaders import FileSystemLoader
+from .loaders import FunctionLoader
+from .loaders import ModuleLoader
+from .loaders import PackageLoader
+from .loaders import PrefixLoader
+from .runtime import ChainableUndefined
+from .runtime import DebugUndefined
+from .runtime import make_logging_undefined
+from .runtime import StrictUndefined
+from .runtime import Undefined
+from .utils import clear_caches
+from .utils import contextfunction
+from .utils import environmentfunction
+from .utils import evalcontextfunction
+from .utils import is_undefined
+from .utils import select_autoescape
+
+__version__ = "2.11.3"
diff --git a/lib/spack/external/jinja2/_compat.py b/lib/spack/external/jinja2/_compat.py
index 61d85301a4..1f044954a0 100644
--- a/lib/spack/external/jinja2/_compat.py
+++ b/lib/spack/external/jinja2/_compat.py
@@ -1,22 +1,12 @@
# -*- coding: utf-8 -*-
-"""
- jinja2._compat
- ~~~~~~~~~~~~~~
-
- Some py2/py3 compatibility support based on a stripped down
- version of six so we don't have to depend on a specific version
- of it.
-
- :copyright: Copyright 2013 by the Jinja team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
+# flake8: noqa
+import marshal
import sys
PY2 = sys.version_info[0] == 2
-PYPY = hasattr(sys, 'pypy_translation_info')
+PYPY = hasattr(sys, "pypy_translation_info")
_identity = lambda x: x
-
if not PY2:
unichr = chr
range_type = range
@@ -30,6 +20,7 @@ if not PY2:
import pickle
from io import BytesIO, StringIO
+
NativeStringIO = StringIO
def reraise(tp, value, tb=None):
@@ -46,6 +37,9 @@ if not PY2:
implements_to_string = _identity
encode_filename = _identity
+ marshal_dump = marshal.dump
+ marshal_load = marshal.load
+
else:
unichr = unichr
text_type = unicode
@@ -59,11 +53,13 @@ else:
import cPickle as pickle
from cStringIO import StringIO as BytesIO, StringIO
+
NativeStringIO = BytesIO
- exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')
+ exec("def reraise(tp, value, tb=None):\n raise tp, value, tb")
from itertools import imap, izip, ifilter
+
intern = intern
def implements_iterator(cls):
@@ -73,14 +69,25 @@ else:
def implements_to_string(cls):
cls.__unicode__ = cls.__str__
- cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
+ cls.__str__ = lambda x: x.__unicode__().encode("utf-8")
return cls
def encode_filename(filename):
if isinstance(filename, unicode):
- return filename.encode('utf-8')
+ return filename.encode("utf-8")
return filename
+ def marshal_dump(code, f):
+ if isinstance(f, file):
+ marshal.dump(code, f)
+ else:
+ f.write(marshal.dumps(code))
+
+ def marshal_load(f):
+ if isinstance(f, file):
+ return marshal.load(f)
+ return marshal.loads(f.read())
+
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
@@ -90,10 +97,36 @@ def with_metaclass(meta, *bases):
class metaclass(type):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
- return type.__new__(metaclass, 'temporary_class', (), {})
+
+ return type.__new__(metaclass, "temporary_class", (), {})
try:
from urllib.parse import quote_from_bytes as url_quote
except ImportError:
from urllib import quote as url_quote
+
+
+try:
+ from collections import abc
+except ImportError:
+ import collections as abc
+
+
+try:
+ from os import fspath
+except ImportError:
+ try:
+ from pathlib import PurePath
+ except ImportError:
+ PurePath = None
+
+ def fspath(path):
+ if hasattr(path, "__fspath__"):
+ return path.__fspath__()
+
+ # Python 3.5 doesn't have __fspath__ yet, use str.
+ if PurePath is not None and isinstance(path, PurePath):
+ return str(path)
+
+ return path
diff --git a/lib/spack/external/jinja2/_identifier.py b/lib/spack/external/jinja2/_identifier.py
index 2eac35d5c3..224d5449d1 100644
--- a/lib/spack/external/jinja2/_identifier.py
+++ b/lib/spack/external/jinja2/_identifier.py
@@ -1,2 +1,6 @@
+import re
+
# generated by scripts/generate_identifier_pattern.py
-pattern = '·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯'
+pattern = re.compile(
+ r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950
+)
diff --git a/lib/spack/external/jinja2/asyncfilters.py b/lib/spack/external/jinja2/asyncfilters.py
index 5c1f46d7fa..3d98dbcc00 100644
--- a/lib/spack/external/jinja2/asyncfilters.py
+++ b/lib/spack/external/jinja2/asyncfilters.py
@@ -1,12 +1,13 @@
from functools import wraps
-from jinja2.asyncsupport import auto_aiter
-from jinja2 import filters
+from . import filters
+from .asyncsupport import auto_aiter
+from .asyncsupport import auto_await
async def auto_to_seq(value):
seq = []
- if hasattr(value, '__aiter__'):
+ if hasattr(value, "__aiter__"):
async for item in value:
seq.append(item)
else:
@@ -16,8 +17,7 @@ async def auto_to_seq(value):
async def async_select_or_reject(args, kwargs, modfunc, lookup_attr):
- seq, func = filters.prepare_select_or_reject(
- args, kwargs, modfunc, lookup_attr)
+ seq, func = filters.prepare_select_or_reject(args, kwargs, modfunc, lookup_attr)
if seq:
async for item in auto_aiter(seq):
if func(item):
@@ -26,14 +26,19 @@ async def async_select_or_reject(args, kwargs, modfunc, lookup_attr):
def dualfilter(normal_filter, async_filter):
wrap_evalctx = False
- if getattr(normal_filter, 'environmentfilter', False):
- is_async = lambda args: args[0].is_async
+ if getattr(normal_filter, "environmentfilter", False) is True:
+
+ def is_async(args):
+ return args[0].is_async
+
wrap_evalctx = False
else:
- if not getattr(normal_filter, 'evalcontextfilter', False) and \
- not getattr(normal_filter, 'contextfilter', False):
- wrap_evalctx = True
- is_async = lambda args: args[0].environment.is_async
+ has_evalctxfilter = getattr(normal_filter, "evalcontextfilter", False) is True
+ has_ctxfilter = getattr(normal_filter, "contextfilter", False) is True
+ wrap_evalctx = not has_evalctxfilter and not has_ctxfilter
+
+ def is_async(args):
+ return args[0].environment.is_async
@wraps(normal_filter)
def wrapper(*args, **kwargs):
@@ -55,6 +60,7 @@ def dualfilter(normal_filter, async_filter):
def asyncfiltervariant(original):
def decorator(f):
return dualfilter(original, f)
+
return decorator
@@ -63,19 +69,22 @@ async def do_first(environment, seq):
try:
return await auto_aiter(seq).__anext__()
except StopAsyncIteration:
- return environment.undefined('No first item, sequence was empty.')
+ return environment.undefined("No first item, sequence was empty.")
@asyncfiltervariant(filters.do_groupby)
async def do_groupby(environment, value, attribute):
expr = filters.make_attrgetter(environment, attribute)
- return [filters._GroupTuple(key, await auto_to_seq(values))
- for key, values in filters.groupby(sorted(
- await auto_to_seq(value), key=expr), expr)]
+ return [
+ filters._GroupTuple(key, await auto_to_seq(values))
+ for key, values in filters.groupby(
+ sorted(await auto_to_seq(value), key=expr), expr
+ )
+ ]
@asyncfiltervariant(filters.do_join)
-async def do_join(eval_ctx, value, d=u'', attribute=None):
+async def do_join(eval_ctx, value, d=u"", attribute=None):
return filters.do_join(eval_ctx, await auto_to_seq(value), d, attribute)
@@ -109,7 +118,7 @@ async def do_map(*args, **kwargs):
seq, func = filters.prepare_map(args, kwargs)
if seq:
async for item in auto_aiter(seq):
- yield func(item)
+ yield await auto_await(func(item))
@asyncfiltervariant(filters.do_sum)
@@ -118,7 +127,10 @@ async def do_sum(environment, iterable, attribute=None, start=0):
if attribute is not None:
func = filters.make_attrgetter(environment, attribute)
else:
- func = lambda x: x
+
+ def func(x):
+ return x
+
async for item in auto_aiter(iterable):
rv += func(item)
return rv
@@ -130,17 +142,17 @@ async def do_slice(value, slices, fill_with=None):
ASYNC_FILTERS = {
- 'first': do_first,
- 'groupby': do_groupby,
- 'join': do_join,
- 'list': do_list,
+ "first": do_first,
+ "groupby": do_groupby,
+ "join": do_join,
+ "list": do_list,
# we intentionally do not support do_last because that would be
# ridiculous
- 'reject': do_reject,
- 'rejectattr': do_rejectattr,
- 'map': do_map,
- 'select': do_select,
- 'selectattr': do_selectattr,
- 'sum': do_sum,
- 'slice': do_slice,
+ "reject": do_reject,
+ "rejectattr": do_rejectattr,
+ "map": do_map,
+ "select": do_select,
+ "selectattr": do_selectattr,
+ "sum": do_sum,
+ "slice": do_slice,
}
diff --git a/lib/spack/external/jinja2/asyncsupport.py b/lib/spack/external/jinja2/asyncsupport.py
index b1e7b5ce9a..78ba3739d8 100644
--- a/lib/spack/external/jinja2/asyncsupport.py
+++ b/lib/spack/external/jinja2/asyncsupport.py
@@ -1,29 +1,27 @@
# -*- coding: utf-8 -*-
+"""The code for async support. Importing this patches Jinja on supported
+Python versions.
"""
- jinja2.asyncsupport
- ~~~~~~~~~~~~~~~~~~~
-
- Has all the code for async support which is implemented as a patch
- for supported Python versions.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
-"""
-import sys
import asyncio
import inspect
from functools import update_wrapper
-from jinja2.utils import concat, internalcode, Markup
-from jinja2.environment import TemplateModule
-from jinja2.runtime import LoopContextBase, _last_iteration
+from markupsafe import Markup
+
+from .environment import TemplateModule
+from .runtime import LoopContext
+from .utils import concat
+from .utils import internalcode
+from .utils import missing
async def concat_async(async_gen):
rv = []
+
async def collect():
async for event in async_gen:
rv.append(event)
+
await collect()
return concat(rv)
@@ -34,10 +32,7 @@ async def generate_async(self, *args, **kwargs):
async for event in self.root_render_func(self.new_context(vars)):
yield event
except Exception:
- exc_info = sys.exc_info()
- else:
- return
- yield self.environment.handle_exception(exc_info, True)
+ yield self.environment.handle_exception()
def wrap_generate_func(original_generate):
@@ -48,17 +43,18 @@ def wrap_generate_func(original_generate):
yield loop.run_until_complete(async_gen.__anext__())
except StopAsyncIteration:
pass
+
def generate(self, *args, **kwargs):
if not self.environment.is_async:
return original_generate(self, *args, **kwargs)
return _convert_generator(self, asyncio.get_event_loop(), args, kwargs)
+
return update_wrapper(generate, original_generate)
async def render_async(self, *args, **kwargs):
if not self.environment.is_async:
- raise RuntimeError('The environment was not created with async mode '
- 'enabled.')
+ raise RuntimeError("The environment was not created with async mode enabled.")
vars = dict(*args, **kwargs)
ctx = self.new_context(vars)
@@ -66,8 +62,7 @@ async def render_async(self, *args, **kwargs):
try:
return await concat_async(self.root_render_func(ctx))
except Exception:
- exc_info = sys.exc_info()
- return self.environment.handle_exception(exc_info, True)
+ return self.environment.handle_exception()
def wrap_render_func(original_render):
@@ -76,6 +71,7 @@ def wrap_render_func(original_render):
return original_render(self, *args, **kwargs)
loop = asyncio.get_event_loop()
return loop.run_until_complete(self.render_async(*args, **kwargs))
+
return update_wrapper(render, original_render)
@@ -109,6 +105,7 @@ def wrap_macro_invoke(original_invoke):
if not self._environment.is_async:
return original_invoke(self, arguments, autoescape)
return async_invoke(self, arguments, autoescape)
+
return update_wrapper(_invoke, original_invoke)
@@ -124,9 +121,9 @@ def wrap_default_module(original_default_module):
@internalcode
def _get_default_module(self):
if self.environment.is_async:
- raise RuntimeError('Template module attribute is unavailable '
- 'in async mode')
+ raise RuntimeError("Template module attribute is unavailable in async mode")
return original_default_module(self)
+
return _get_default_module
@@ -139,30 +136,30 @@ async def make_module_async(self, vars=None, shared=False, locals=None):
def patch_template():
- from jinja2 import Template
+ from . import Template
+
Template.generate = wrap_generate_func(Template.generate)
- Template.generate_async = update_wrapper(
- generate_async, Template.generate_async)
- Template.render_async = update_wrapper(
- render_async, Template.render_async)
+ Template.generate_async = update_wrapper(generate_async, Template.generate_async)
+ Template.render_async = update_wrapper(render_async, Template.render_async)
Template.render = wrap_render_func(Template.render)
- Template._get_default_module = wrap_default_module(
- Template._get_default_module)
+ Template._get_default_module = wrap_default_module(Template._get_default_module)
Template._get_default_module_async = get_default_module_async
Template.make_module_async = update_wrapper(
- make_module_async, Template.make_module_async)
+ make_module_async, Template.make_module_async
+ )
def patch_runtime():
- from jinja2.runtime import BlockReference, Macro
- BlockReference.__call__ = wrap_block_reference_call(
- BlockReference.__call__)
+ from .runtime import BlockReference, Macro
+
+ BlockReference.__call__ = wrap_block_reference_call(BlockReference.__call__)
Macro._invoke = wrap_macro_invoke(Macro._invoke)
def patch_filters():
- from jinja2.filters import FILTERS
- from jinja2.asyncfilters import ASYNC_FILTERS
+ from .filters import FILTERS
+ from .asyncfilters import ASYNC_FILTERS
+
FILTERS.update(ASYNC_FILTERS)
@@ -179,7 +176,7 @@ async def auto_await(value):
async def auto_aiter(iterable):
- if hasattr(iterable, '__aiter__'):
+ if hasattr(iterable, "__aiter__"):
async for item in iterable:
yield item
return
@@ -187,70 +184,81 @@ async def auto_aiter(iterable):
yield item
-class AsyncLoopContext(LoopContextBase):
-
- def __init__(self, async_iterator, undefined, after, length, recurse=None,
- depth0=0):
- LoopContextBase.__init__(self, undefined, recurse, depth0)
- self._async_iterator = async_iterator
- self._after = after
- self._length = length
+class AsyncLoopContext(LoopContext):
+ _to_iterator = staticmethod(auto_aiter)
@property
- def length(self):
- if self._length is None:
- raise TypeError('Loop length for some iterators cannot be '
- 'lazily calculated in async mode')
+ async def length(self):
+ if self._length is not None:
+ return self._length
+
+ try:
+ self._length = len(self._iterable)
+ except TypeError:
+ iterable = [x async for x in self._iterator]
+ self._iterator = self._to_iterator(iterable)
+ self._length = len(iterable) + self.index + (self._after is not missing)
+
return self._length
- def __aiter__(self):
- return AsyncLoopContextIterator(self)
+ @property
+ async def revindex0(self):
+ return await self.length - self.index
+ @property
+ async def revindex(self):
+ return await self.length - self.index0
+
+ async def _peek_next(self):
+ if self._after is not missing:
+ return self._after
+
+ try:
+ self._after = await self._iterator.__anext__()
+ except StopAsyncIteration:
+ self._after = missing
-class AsyncLoopContextIterator(object):
- __slots__ = ('context',)
+ return self._after
- def __init__(self, context):
- self.context = context
+ @property
+ async def last(self):
+ return await self._peek_next() is missing
+
+ @property
+ async def nextitem(self):
+ rv = await self._peek_next()
+
+ if rv is missing:
+ return self._undefined("there is no next item")
+
+ return rv
def __aiter__(self):
return self
async def __anext__(self):
- ctx = self.context
- ctx.index0 += 1
- if ctx._after is _last_iteration:
- raise StopAsyncIteration()
- ctx._before = ctx._current
- ctx._current = ctx._after
- try:
- ctx._after = await ctx._async_iterator.__anext__()
- except StopAsyncIteration:
- ctx._after = _last_iteration
- return ctx._current, ctx
+ if self._after is not missing:
+ rv = self._after
+ self._after = missing
+ else:
+ rv = await self._iterator.__anext__()
+
+ self.index0 += 1
+ self._before = self._current
+ self._current = rv
+ return rv, self
async def make_async_loop_context(iterable, undefined, recurse=None, depth0=0):
- # Length is more complicated and less efficient in async mode. The
- # reason for this is that we cannot know if length will be used
- # upfront but because length is a property we cannot lazily execute it
- # later. This means that we need to buffer it up and measure :(
- #
- # We however only do this for actual iterators, not for async
- # iterators as blocking here does not seem like the best idea in the
- # world.
- try:
- length = len(iterable)
- except (TypeError, AttributeError):
- if not hasattr(iterable, '__aiter__'):
- iterable = tuple(iterable)
- length = len(iterable)
- else:
- length = None
- async_iterator = auto_aiter(iterable)
- try:
- after = await async_iterator.__anext__()
- except StopAsyncIteration:
- after = _last_iteration
- return AsyncLoopContext(async_iterator, undefined, after, length, recurse,
- depth0)
+ import warnings
+
+ warnings.warn(
+ "This template must be recompiled with at least Jinja 2.11, or"
+ " it will fail in 3.0.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return AsyncLoopContext(iterable, undefined, recurse, depth0)
+
+
+patch_all()
diff --git a/lib/spack/external/jinja2/bccache.py b/lib/spack/external/jinja2/bccache.py
index 080e527cab..9c0661030f 100644
--- a/lib/spack/external/jinja2/bccache.py
+++ b/lib/spack/external/jinja2/bccache.py
@@ -1,60 +1,37 @@
# -*- coding: utf-8 -*-
-"""
- jinja2.bccache
- ~~~~~~~~~~~~~~
-
- This module implements the bytecode cache system Jinja is optionally
- using. This is useful if you have very complex template situations and
- the compiliation of all those templates slow down your application too
- much.
-
- Situations where this is useful are often forking web applications that
- are initialized on the first request.
+"""The optional bytecode cache system. This is useful if you have very
+complex template situations and the compilation of all those templates
+slows down your application too much.
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD.
+Situations where this is useful are often forking web applications that
+are initialized on the first request.
"""
-from os import path, listdir
+import errno
+import fnmatch
import os
-import sys
import stat
-import errno
-import marshal
+import sys
import tempfile
-import fnmatch
from hashlib import sha1
-from jinja2.utils import open_if_exists
-from jinja2._compat import BytesIO, pickle, PY2, text_type
-
-
-# marshal works better on 3.x, one hack less required
-if not PY2:
- marshal_dump = marshal.dump
- marshal_load = marshal.load
-else:
-
- def marshal_dump(code, f):
- if isinstance(f, file):
- marshal.dump(code, f)
- else:
- f.write(marshal.dumps(code))
-
- def marshal_load(f):
- if isinstance(f, file):
- return marshal.load(f)
- return marshal.loads(f.read())
-
-
-bc_version = 3
-
-# magic version used to only change with new jinja versions. With 2.6
-# we change this to also take Python version changes into account. The
-# reason for this is that Python tends to segfault if fed earlier bytecode
-# versions because someone thought it would be a good idea to reuse opcodes
-# or make Python incompatible with earlier versions.
-bc_magic = 'j2'.encode('ascii') + \
- pickle.dumps(bc_version, 2) + \
- pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1])
+from os import listdir
+from os import path
+
+from ._compat import BytesIO
+from ._compat import marshal_dump
+from ._compat import marshal_load
+from ._compat import pickle
+from ._compat import text_type
+from .utils import open_if_exists
+
+bc_version = 4
+# Magic bytes to identify Jinja bytecode cache files. Contains the
+# Python major and minor version to avoid loading incompatible bytecode
+# if a project upgrades its Python version.
+bc_magic = (
+ b"j2"
+ + pickle.dumps(bc_version, 2)
+ + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2)
+)
class Bucket(object):
@@ -98,7 +75,7 @@ class Bucket(object):
def write_bytecode(self, f):
"""Dump the bytecode into the file or file like object passed."""
if self.code is None:
- raise TypeError('can\'t write empty bucket')
+ raise TypeError("can't write empty bucket")
f.write(bc_magic)
pickle.dump(self.checksum, f, 2)
marshal_dump(self.code, f)
@@ -140,7 +117,7 @@ class BytecodeCache(object):
bucket.write_bytecode(f)
A more advanced version of a filesystem based bytecode cache is part of
- Jinja2.
+ Jinja.
"""
def load_bytecode(self, bucket):
@@ -158,24 +135,24 @@ class BytecodeCache(object):
raise NotImplementedError()
def clear(self):
- """Clears the cache. This method is not used by Jinja2 but should be
+ """Clears the cache. This method is not used by Jinja but should be
implemented to allow applications to clear the bytecode cache used
by a particular environment.
"""
def get_cache_key(self, name, filename=None):
"""Returns the unique hash key for this template name."""
- hash = sha1(name.encode('utf-8'))
+ hash = sha1(name.encode("utf-8"))
if filename is not None:
- filename = '|' + filename
+ filename = "|" + filename
if isinstance(filename, text_type):
- filename = filename.encode('utf-8')
+ filename = filename.encode("utf-8")
hash.update(filename)
return hash.hexdigest()
def get_source_checksum(self, source):
"""Returns a checksum for the source."""
- return sha1(source.encode('utf-8')).hexdigest()
+ return sha1(source.encode("utf-8")).hexdigest()
def get_bucket(self, environment, name, filename, source):
"""Return a cache bucket for the given template. All arguments are
@@ -210,7 +187,7 @@ class FileSystemBytecodeCache(BytecodeCache):
This bytecode cache supports clearing of the cache using the clear method.
"""
- def __init__(self, directory=None, pattern='__jinja2_%s.cache'):
+ def __init__(self, directory=None, pattern="__jinja2_%s.cache"):
if directory is None:
directory = self._get_default_cache_dir()
self.directory = directory
@@ -218,19 +195,21 @@ class FileSystemBytecodeCache(BytecodeCache):
def _get_default_cache_dir(self):
def _unsafe_dir():
- raise RuntimeError('Cannot determine safe temp directory. You '
- 'need to explicitly provide one.')
+ raise RuntimeError(
+ "Cannot determine safe temp directory. You "
+ "need to explicitly provide one."
+ )
tmpdir = tempfile.gettempdir()
# On windows the temporary directory is used specific unless
# explicitly forced otherwise. We can just use that.
- if os.name == 'nt':
+ if os.name == "nt":
return tmpdir
- if not hasattr(os, 'getuid'):
+ if not hasattr(os, "getuid"):
_unsafe_dir()
- dirname = '_jinja2-cache-%d' % os.getuid()
+ dirname = "_jinja2-cache-%d" % os.getuid()
actual_dir = os.path.join(tmpdir, dirname)
try:
@@ -241,18 +220,22 @@ class FileSystemBytecodeCache(BytecodeCache):
try:
os.chmod(actual_dir, stat.S_IRWXU)
actual_dir_stat = os.lstat(actual_dir)
- if actual_dir_stat.st_uid != os.getuid() \
- or not stat.S_ISDIR(actual_dir_stat.st_mode) \
- or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU:
+ if (
+ actual_dir_stat.st_uid != os.getuid()
+ or not stat.S_ISDIR(actual_dir_stat.st_mode)
+ or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
+ ):
_unsafe_dir()
except OSError as e:
if e.errno != errno.EEXIST:
raise
actual_dir_stat = os.lstat(actual_dir)
- if actual_dir_stat.st_uid != os.getuid() \
- or not stat.S_ISDIR(actual_dir_stat.st_mode) \
- or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU:
+ if (
+ actual_dir_stat.st_uid != os.getuid()
+ or not stat.S_ISDIR(actual_dir_stat.st_mode)
+ or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
+ ):
_unsafe_dir()
return actual_dir
@@ -261,7 +244,7 @@ class FileSystemBytecodeCache(BytecodeCache):
return path.join(self.directory, self.pattern % bucket.key)
def load_bytecode(self, bucket):
- f = open_if_exists(self._get_cache_filename(bucket), 'rb')
+ f = open_if_exists(self._get_cache_filename(bucket), "rb")
if f is not None:
try:
bucket.load_bytecode(f)
@@ -269,7 +252,7 @@ class FileSystemBytecodeCache(BytecodeCache):
f.close()
def dump_bytecode(self, bucket):
- f = open(self._get_cache_filename(bucket), 'wb')
+ f = open(self._get_cache_filename(bucket), "wb")
try:
bucket.write_bytecode(f)
finally:
@@ -280,7 +263,8 @@ class FileSystemBytecodeCache(BytecodeCache):
# write access on the file system and the function does not exist
# normally.
from os import remove
- files = fnmatch.filter(listdir(self.directory), self.pattern % '*')
+
+ files = fnmatch.filter(listdir(self.directory), self.pattern % "*")
for filename in files:
try:
remove(path.join(self.directory, filename))
@@ -296,9 +280,8 @@ class MemcachedBytecodeCache(BytecodeCache):
Libraries compatible with this class:
- - `werkzeug <http://werkzeug.pocoo.org/>`_.contrib.cache
- - `python-memcached <https://www.tummy.com/Community/software/python-memcached/>`_
- - `cmemcache <http://gijsbert.org/cmemcache/>`_
+ - `cachelib <https://github.com/pallets/cachelib>`_
+ - `python-memcached <https://pypi.org/project/python-memcached/>`_
(Unfortunately the django cache interface is not compatible because it
does not support storing binary data, only unicode. You can however pass
@@ -334,8 +317,13 @@ class MemcachedBytecodeCache(BytecodeCache):
`ignore_memcache_errors` parameter.
"""
- def __init__(self, client, prefix='jinja2/bytecode/', timeout=None,
- ignore_memcache_errors=True):
+ def __init__(
+ self,
+ client,
+ prefix="jinja2/bytecode/",
+ timeout=None,
+ ignore_memcache_errors=True,
+ ):
self.client = client
self.prefix = prefix
self.timeout = timeout
diff --git a/lib/spack/external/jinja2/compiler.py b/lib/spack/external/jinja2/compiler.py
index d534a82739..63297b42c3 100644
--- a/lib/spack/external/jinja2/compiler.py
+++ b/lib/spack/external/jinja2/compiler.py
@@ -1,59 +1,62 @@
# -*- coding: utf-8 -*-
-"""
- jinja2.compiler
- ~~~~~~~~~~~~~~~
-
- Compiles nodes into python code.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
-"""
+"""Compiles nodes from the parser into Python code."""
+from collections import namedtuple
+from functools import update_wrapper
from itertools import chain
-from copy import deepcopy
from keyword import iskeyword as is_python_keyword
-from functools import update_wrapper
-from jinja2 import nodes
-from jinja2.nodes import EvalContext
-from jinja2.visitor import NodeVisitor
-from jinja2.optimizer import Optimizer
-from jinja2.exceptions import TemplateAssertionError
-from jinja2.utils import Markup, concat, escape
-from jinja2._compat import range_type, text_type, string_types, \
- iteritems, NativeStringIO, imap, izip
-from jinja2.idtracking import Symbols, VAR_LOAD_PARAMETER, \
- VAR_LOAD_RESOLVE, VAR_LOAD_ALIAS, VAR_LOAD_UNDEFINED
+from markupsafe import escape
+from markupsafe import Markup
+
+from . import nodes
+from ._compat import imap
+from ._compat import iteritems
+from ._compat import izip
+from ._compat import NativeStringIO
+from ._compat import range_type
+from ._compat import string_types
+from ._compat import text_type
+from .exceptions import TemplateAssertionError
+from .idtracking import Symbols
+from .idtracking import VAR_LOAD_ALIAS
+from .idtracking import VAR_LOAD_PARAMETER
+from .idtracking import VAR_LOAD_RESOLVE
+from .idtracking import VAR_LOAD_UNDEFINED
+from .nodes import EvalContext
+from .optimizer import Optimizer
+from .utils import concat
+from .visitor import NodeVisitor
operators = {
- 'eq': '==',
- 'ne': '!=',
- 'gt': '>',
- 'gteq': '>=',
- 'lt': '<',
- 'lteq': '<=',
- 'in': 'in',
- 'notin': 'not in'
+ "eq": "==",
+ "ne": "!=",
+ "gt": ">",
+ "gteq": ">=",
+ "lt": "<",
+ "lteq": "<=",
+ "in": "in",
+ "notin": "not in",
}
# what method to iterate over items do we want to use for dict iteration
# in generated code? on 2.x let's go with iteritems, on 3.x with items
-if hasattr(dict, 'iteritems'):
- dict_item_iter = 'iteritems'
+if hasattr(dict, "iteritems"):
+ dict_item_iter = "iteritems"
else:
- dict_item_iter = 'items'
+ dict_item_iter = "items"
-code_features = ['division']
+code_features = ["division"]
# does this python version support generator stops? (PEP 0479)
try:
- exec('from __future__ import generator_stop')
- code_features.append('generator_stop')
+ exec("from __future__ import generator_stop")
+ code_features.append("generator_stop")
except SyntaxError:
pass
# does this python version support yield from?
try:
- exec('def f(): yield from x()')
+ exec("def f(): yield from x()")
except SyntaxError:
supports_yield_from = False
else:
@@ -68,17 +71,19 @@ def optimizeconst(f):
if new_node != node:
return self.visit(new_node, frame)
return f(self, node, frame, **kwargs)
+
return update_wrapper(new_func, f)
-def generate(node, environment, name, filename, stream=None,
- defer_init=False, optimized=True):
+def generate(
+ node, environment, name, filename, stream=None, defer_init=False, optimized=True
+):
"""Generate the python source for a node tree."""
if not isinstance(node, nodes.Template):
- raise TypeError('Can\'t compile non template nodes')
- generator = environment.code_generator_class(environment, name, filename,
- stream, defer_init,
- optimized)
+ raise TypeError("Can't compile non template nodes")
+ generator = environment.code_generator_class(
+ environment, name, filename, stream, defer_init, optimized
+ )
generator.visit(node)
if stream is None:
return generator.stream.getvalue()
@@ -119,7 +124,6 @@ def find_undeclared(nodes, names):
class MacroRef(object):
-
def __init__(self, node):
self.node = node
self.accesses_caller = False
@@ -132,8 +136,7 @@ class Frame(object):
def __init__(self, eval_ctx, parent=None, level=None):
self.eval_ctx = eval_ctx
- self.symbols = Symbols(parent and parent.symbols or None,
- level=level)
+ self.symbols = Symbols(parent and parent.symbols or None, level=level)
# a toplevel frame is the root + soft frames such as if conditions.
self.toplevel = False
@@ -223,7 +226,7 @@ class UndeclaredNameVisitor(NodeVisitor):
self.undeclared = set()
def visit_Name(self, node):
- if node.ctx == 'load' and node.name in self.names:
+ if node.ctx == "load" and node.name in self.names:
self.undeclared.add(node.name)
if self.undeclared == self.names:
raise VisitorExit()
@@ -242,9 +245,9 @@ class CompilerExit(Exception):
class CodeGenerator(NodeVisitor):
-
- def __init__(self, environment, name, filename, stream=None,
- defer_init=False, optimized=True):
+ def __init__(
+ self, environment, name, filename, stream=None, defer_init=False, optimized=True
+ ):
if stream is None:
stream = NativeStringIO()
self.environment = environment
@@ -306,7 +309,7 @@ class CodeGenerator(NodeVisitor):
self._param_def_block = []
# Tracks the current context.
- self._context_reference_stack = ['context']
+ self._context_reference_stack = ["context"]
# -- Various compilation helpers
@@ -317,30 +320,30 @@ class CodeGenerator(NodeVisitor):
def temporary_identifier(self):
"""Get a new unique identifier."""
self._last_identifier += 1
- return 't_%d' % self._last_identifier
+ return "t_%d" % self._last_identifier
def buffer(self, frame):
"""Enable buffering for the frame from that point onwards."""
frame.buffer = self.temporary_identifier()
- self.writeline('%s = []' % frame.buffer)
+ self.writeline("%s = []" % frame.buffer)
def return_buffer_contents(self, frame, force_unescaped=False):
"""Return the buffer contents of the frame."""
if not force_unescaped:
if frame.eval_ctx.volatile:
- self.writeline('if context.eval_ctx.autoescape:')
+ self.writeline("if context.eval_ctx.autoescape:")
self.indent()
- self.writeline('return Markup(concat(%s))' % frame.buffer)
+ self.writeline("return Markup(concat(%s))" % frame.buffer)
self.outdent()
- self.writeline('else:')
+ self.writeline("else:")
self.indent()
- self.writeline('return concat(%s)' % frame.buffer)
+ self.writeline("return concat(%s)" % frame.buffer)
self.outdent()
return
elif frame.eval_ctx.autoescape:
- self.writeline('return Markup(concat(%s))' % frame.buffer)
+ self.writeline("return Markup(concat(%s))" % frame.buffer)
return
- self.writeline('return concat(%s)' % frame.buffer)
+ self.writeline("return concat(%s)" % frame.buffer)
def indent(self):
"""Indent by one."""
@@ -353,14 +356,14 @@ class CodeGenerator(NodeVisitor):
def start_write(self, frame, node=None):
"""Yield or write into the frame buffer."""
if frame.buffer is None:
- self.writeline('yield ', node)
+ self.writeline("yield ", node)
else:
- self.writeline('%s.append(' % frame.buffer, node)
+ self.writeline("%s.append(" % frame.buffer, node)
def end_write(self, frame):
"""End the writing process started by `start_write`."""
if frame.buffer is not None:
- self.write(')')
+ self.write(")")
def simple_write(self, s, frame, node=None):
"""Simple shortcut for start_write + write + end_write."""
@@ -373,7 +376,7 @@ class CodeGenerator(NodeVisitor):
is no buffer a dummy ``if 0: yield None`` is written automatically.
"""
try:
- self.writeline('pass')
+ self.writeline("pass")
for node in nodes:
self.visit(node, frame)
except CompilerExit:
@@ -383,14 +386,13 @@ class CodeGenerator(NodeVisitor):
"""Write a string into the output stream."""
if self._new_lines:
if not self._first_write:
- self.stream.write('\n' * self._new_lines)
+ self.stream.write("\n" * self._new_lines)
self.code_lineno += self._new_lines
if self._write_debug_info is not None:
- self.debug_info.append((self._write_debug_info,
- self.code_lineno))
+ self.debug_info.append((self._write_debug_info, self.code_lineno))
self._write_debug_info = None
self._first_write = False
- self.stream.write(' ' * self._indentation)
+ self.stream.write(" " * self._indentation)
self._new_lines = 0
self.stream.write(x)
@@ -410,7 +412,7 @@ class CodeGenerator(NodeVisitor):
"""Writes a function call to the stream for the current node.
A leading comma is added automatically. The extra keyword
arguments may not include python keywords otherwise a syntax
- error could occour. The extra keyword arguments should be given
+ error could occur. The extra keyword arguments should be given
as python dict.
"""
# if any of the given keyword arguments is a python keyword
@@ -422,41 +424,41 @@ class CodeGenerator(NodeVisitor):
break
for arg in node.args:
- self.write(', ')
+ self.write(", ")
self.visit(arg, frame)
if not kwarg_workaround:
for kwarg in node.kwargs:
- self.write(', ')
+ self.write(", ")
self.visit(kwarg, frame)
if extra_kwargs is not None:
for key, value in iteritems(extra_kwargs):
- self.write(', %s=%s' % (key, value))
+ self.write(", %s=%s" % (key, value))
if node.dyn_args:
- self.write(', *')
+ self.write(", *")
self.visit(node.dyn_args, frame)
if kwarg_workaround:
if node.dyn_kwargs is not None:
- self.write(', **dict({')
+ self.write(", **dict({")
else:
- self.write(', **{')
+ self.write(", **{")
for kwarg in node.kwargs:
- self.write('%r: ' % kwarg.key)
+ self.write("%r: " % kwarg.key)
self.visit(kwarg.value, frame)
- self.write(', ')
+ self.write(", ")
if extra_kwargs is not None:
for key, value in iteritems(extra_kwargs):
- self.write('%r: %s, ' % (key, value))
+ self.write("%r: %s, " % (key, value))
if node.dyn_kwargs is not None:
- self.write('}, **')
+ self.write("}, **")
self.visit(node.dyn_kwargs, frame)
- self.write(')')
+ self.write(")")
else:
- self.write('}')
+ self.write("}")
elif node.dyn_kwargs is not None:
- self.write(', **')
+ self.write(", **")
self.visit(node.dyn_kwargs, frame)
def pull_dependencies(self, nodes):
@@ -464,13 +466,14 @@ class CodeGenerator(NodeVisitor):
visitor = DependencyFinderVisitor()
for node in nodes:
visitor.visit(node)
- for dependency in 'filters', 'tests':
+ for dependency in "filters", "tests":
mapping = getattr(self, dependency)
for name in getattr(visitor, dependency):
if name not in mapping:
mapping[name] = self.temporary_identifier()
- self.writeline('%s = environment.%s[%r]' %
- (mapping[name], dependency, name))
+ self.writeline(
+ "%s = environment.%s[%r]" % (mapping[name], dependency, name)
+ )
def enter_frame(self, frame):
undefs = []
@@ -478,16 +481,15 @@ class CodeGenerator(NodeVisitor):
if action == VAR_LOAD_PARAMETER:
pass
elif action == VAR_LOAD_RESOLVE:
- self.writeline('%s = %s(%r)' %
- (target, self.get_resolve_func(), param))
+ self.writeline("%s = %s(%r)" % (target, self.get_resolve_func(), param))
elif action == VAR_LOAD_ALIAS:
- self.writeline('%s = %s' % (target, param))
+ self.writeline("%s = %s" % (target, param))
elif action == VAR_LOAD_UNDEFINED:
undefs.append(target)
else:
- raise NotImplementedError('unknown load instruction')
+ raise NotImplementedError("unknown load instruction")
if undefs:
- self.writeline('%s = missing' % ' = '.join(undefs))
+ self.writeline("%s = missing" % " = ".join(undefs))
def leave_frame(self, frame, with_python_scope=False):
if not with_python_scope:
@@ -495,12 +497,12 @@ class CodeGenerator(NodeVisitor):
for target, _ in iteritems(frame.symbols.loads):
undefs.append(target)
if undefs:
- self.writeline('%s = missing' % ' = '.join(undefs))
+ self.writeline("%s = missing" % " = ".join(undefs))
def func(self, name):
if self.environment.is_async:
- return 'async def %s' % name
- return 'def %s' % name
+ return "async def %s" % name
+ return "def %s" % name
def macro_body(self, node, frame):
"""Dump the function def of a macro or call block."""
@@ -512,16 +514,16 @@ class CodeGenerator(NodeVisitor):
skip_special_params = set()
args = []
for idx, arg in enumerate(node.args):
- if arg.name == 'caller':
+ if arg.name == "caller":
explicit_caller = idx
- if arg.name in ('kwargs', 'varargs'):
+ if arg.name in ("kwargs", "varargs"):
skip_special_params.add(arg.name)
args.append(frame.symbols.ref(arg.name))
- undeclared = find_undeclared(node.body, ('caller', 'kwargs', 'varargs'))
+ undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs"))
- if 'caller' in undeclared:
- # In older Jinja2 versions there was a bug that allowed caller
+ if "caller" in undeclared:
+ # In older Jinja versions there was a bug that allowed caller
# to retain the special behavior even if it was mentioned in
# the argument list. However thankfully this was only really
# working if it was the last argument. So we are explicitly
@@ -531,23 +533,26 @@ class CodeGenerator(NodeVisitor):
try:
node.defaults[explicit_caller - len(node.args)]
except IndexError:
- self.fail('When defining macros or call blocks the '
- 'special "caller" argument must be omitted '
- 'or be given a default.', node.lineno)
+ self.fail(
+ "When defining macros or call blocks the "
+ 'special "caller" argument must be omitted '
+ "or be given a default.",
+ node.lineno,
+ )
else:
- args.append(frame.symbols.declare_parameter('caller'))
+ args.append(frame.symbols.declare_parameter("caller"))
macro_ref.accesses_caller = True
- if 'kwargs' in undeclared and not 'kwargs' in skip_special_params:
- args.append(frame.symbols.declare_parameter('kwargs'))
+ if "kwargs" in undeclared and "kwargs" not in skip_special_params:
+ args.append(frame.symbols.declare_parameter("kwargs"))
macro_ref.accesses_kwargs = True
- if 'varargs' in undeclared and not 'varargs' in skip_special_params:
- args.append(frame.symbols.declare_parameter('varargs'))
+ if "varargs" in undeclared and "varargs" not in skip_special_params:
+ args.append(frame.symbols.declare_parameter("varargs"))
macro_ref.accesses_varargs = True
# macros are delayed, they never require output checks
frame.require_output_check = False
frame.symbols.analyze_node(node)
- self.writeline('%s(%s):' % (self.func('macro'), ', '.join(args)), node)
+ self.writeline("%s(%s):" % (self.func("macro"), ", ".join(args)), node)
self.indent()
self.buffer(frame)
@@ -556,17 +561,17 @@ class CodeGenerator(NodeVisitor):
self.push_parameter_definitions(frame)
for idx, arg in enumerate(node.args):
ref = frame.symbols.ref(arg.name)
- self.writeline('if %s is missing:' % ref)
+ self.writeline("if %s is missing:" % ref)
self.indent()
try:
default = node.defaults[idx - len(node.args)]
except IndexError:
- self.writeline('%s = undefined(%r, name=%r)' % (
- ref,
- 'parameter %r was not provided' % arg.name,
- arg.name))
+ self.writeline(
+ "%s = undefined(%r, name=%r)"
+ % (ref, "parameter %r was not provided" % arg.name, arg.name)
+ )
else:
- self.writeline('%s = ' % ref)
+ self.writeline("%s = " % ref)
self.visit(default, frame)
self.mark_parameter_stored(ref)
self.outdent()
@@ -581,35 +586,46 @@ class CodeGenerator(NodeVisitor):
def macro_def(self, macro_ref, frame):
"""Dump the macro definition for the def created by macro_body."""
- arg_tuple = ', '.join(repr(x.name) for x in macro_ref.node.args)
- name = getattr(macro_ref.node, 'name', None)
+ arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args)
+ name = getattr(macro_ref.node, "name", None)
if len(macro_ref.node.args) == 1:
- arg_tuple += ','
- self.write('Macro(environment, macro, %r, (%s), %r, %r, %r, '
- 'context.eval_ctx.autoescape)' %
- (name, arg_tuple, macro_ref.accesses_kwargs,
- macro_ref.accesses_varargs, macro_ref.accesses_caller))
+ arg_tuple += ","
+ self.write(
+ "Macro(environment, macro, %r, (%s), %r, %r, %r, "
+ "context.eval_ctx.autoescape)"
+ % (
+ name,
+ arg_tuple,
+ macro_ref.accesses_kwargs,
+ macro_ref.accesses_varargs,
+ macro_ref.accesses_caller,
+ )
+ )
def position(self, node):
"""Return a human readable position for the node."""
- rv = 'line %d' % node.lineno
+ rv = "line %d" % node.lineno
if self.name is not None:
- rv += ' in ' + repr(self.name)
+ rv += " in " + repr(self.name)
return rv
def dump_local_context(self, frame):
- return '{%s}' % ', '.join(
- '%r: %s' % (name, target) for name, target
- in iteritems(frame.symbols.dump_stores()))
+ return "{%s}" % ", ".join(
+ "%r: %s" % (name, target)
+ for name, target in iteritems(frame.symbols.dump_stores())
+ )
def write_commons(self):
"""Writes a common preamble that is used by root and block functions.
Primarily this sets up common local helpers and enforces a generator
through a dead branch.
"""
- self.writeline('resolve = context.resolve_or_missing')
- self.writeline('undefined = environment.undefined')
- self.writeline('if 0: yield None')
+ self.writeline("resolve = context.resolve_or_missing")
+ self.writeline("undefined = environment.undefined")
+ # always use the standard Undefined class for the implicit else of
+ # conditional expressions
+ self.writeline("cond_expr_undefined = Undefined")
+ self.writeline("if 0: yield None")
def push_parameter_definitions(self, frame):
"""Pushes all parameter targets from the given frame into a local
@@ -642,12 +658,12 @@ class CodeGenerator(NodeVisitor):
def get_resolve_func(self):
target = self._context_reference_stack[-1]
- if target == 'context':
- return 'resolve'
- return '%s.resolve' % target
+ if target == "context":
+ return "resolve"
+ return "%s.resolve" % target
def derive_context(self, frame):
- return '%s.derived(%s)' % (
+ return "%s.derived(%s)" % (
self.get_context_ref(),
self.dump_local_context(frame),
)
@@ -669,44 +685,48 @@ class CodeGenerator(NodeVisitor):
vars = self._assign_stack.pop()
if not frame.toplevel or not vars:
return
- public_names = [x for x in vars if x[:1] != '_']
+ public_names = [x for x in vars if x[:1] != "_"]
if len(vars) == 1:
name = next(iter(vars))
ref = frame.symbols.ref(name)
- self.writeline('context.vars[%r] = %s' % (name, ref))
+ self.writeline("context.vars[%r] = %s" % (name, ref))
else:
- self.writeline('context.vars.update({')
+ self.writeline("context.vars.update({")
for idx, name in enumerate(vars):
if idx:
- self.write(', ')
+ self.write(", ")
ref = frame.symbols.ref(name)
- self.write('%r: %s' % (name, ref))
- self.write('})')
+ self.write("%r: %s" % (name, ref))
+ self.write("})")
if public_names:
if len(public_names) == 1:
- self.writeline('context.exported_vars.add(%r)' %
- public_names[0])
+ self.writeline("context.exported_vars.add(%r)" % public_names[0])
else:
- self.writeline('context.exported_vars.update((%s))' %
- ', '.join(imap(repr, public_names)))
+ self.writeline(
+ "context.exported_vars.update((%s))"
+ % ", ".join(imap(repr, public_names))
+ )
# -- Statement Visitors
def visit_Template(self, node, frame=None):
- assert frame is None, 'no root frame allowed'
+ assert frame is None, "no root frame allowed"
eval_ctx = EvalContext(self.environment, self.name)
- from jinja2.runtime import __all__ as exported
- self.writeline('from __future__ import %s' % ', '.join(code_features))
- self.writeline('from jinja2.runtime import ' + ', '.join(exported))
+ from .runtime import exported
+
+ self.writeline("from __future__ import %s" % ", ".join(code_features))
+ self.writeline("from jinja2.runtime import " + ", ".join(exported))
if self.environment.is_async:
- self.writeline('from jinja2.asyncsupport import auto_await, '
- 'auto_aiter, make_async_loop_context')
+ self.writeline(
+ "from jinja2.asyncsupport import auto_await, "
+ "auto_aiter, AsyncLoopContext"
+ )
# if we want a deferred initialization we cannot move the
# environment into a local name
- envenv = not self.defer_init and ', environment=environment' or ''
+ envenv = not self.defer_init and ", environment=environment" or ""
# do we have an extends tag at all? If not, we can save some
# overhead by just not processing any inheritance code.
@@ -715,7 +735,7 @@ class CodeGenerator(NodeVisitor):
# find all blocks
for block in node.find_all(nodes.Block):
if block.name in self.blocks:
- self.fail('block %r defined twice' % block.name, block.lineno)
+ self.fail("block %r defined twice" % block.name, block.lineno)
self.blocks[block.name] = block
# find all imports and import them
@@ -723,32 +743,32 @@ class CodeGenerator(NodeVisitor):
if import_.importname not in self.import_aliases:
imp = import_.importname
self.import_aliases[imp] = alias = self.temporary_identifier()
- if '.' in imp:
- module, obj = imp.rsplit('.', 1)
- self.writeline('from %s import %s as %s' %
- (module, obj, alias))
+ if "." in imp:
+ module, obj = imp.rsplit(".", 1)
+ self.writeline("from %s import %s as %s" % (module, obj, alias))
else:
- self.writeline('import %s as %s' % (imp, alias))
+ self.writeline("import %s as %s" % (imp, alias))
# add the load name
- self.writeline('name = %r' % self.name)
+ self.writeline("name = %r" % self.name)
# generate the root render function.
- self.writeline('%s(context, missing=missing%s):' %
- (self.func('root'), envenv), extra=1)
+ self.writeline(
+ "%s(context, missing=missing%s):" % (self.func("root"), envenv), extra=1
+ )
self.indent()
self.write_commons()
# process the root
frame = Frame(eval_ctx)
- if 'self' in find_undeclared(node.body, ('self',)):
- ref = frame.symbols.declare_parameter('self')
- self.writeline('%s = TemplateReference(context)' % ref)
+ if "self" in find_undeclared(node.body, ("self",)):
+ ref = frame.symbols.declare_parameter("self")
+ self.writeline("%s = TemplateReference(context)" % ref)
frame.symbols.analyze_node(node)
frame.toplevel = frame.rootlevel = True
frame.require_output_check = have_extends and not self.has_known_extends
if have_extends:
- self.writeline('parent_template = None')
+ self.writeline("parent_template = None")
self.enter_frame(frame)
self.pull_dependencies(node.body)
self.blockvisit(node.body, frame)
@@ -759,39 +779,42 @@ class CodeGenerator(NodeVisitor):
if have_extends:
if not self.has_known_extends:
self.indent()
- self.writeline('if parent_template is not None:')
+ self.writeline("if parent_template is not None:")
self.indent()
if supports_yield_from and not self.environment.is_async:
- self.writeline('yield from parent_template.'
- 'root_render_func(context)')
+ self.writeline("yield from parent_template.root_render_func(context)")
else:
- self.writeline('%sfor event in parent_template.'
- 'root_render_func(context):' %
- (self.environment.is_async and 'async ' or ''))
+ self.writeline(
+ "%sfor event in parent_template."
+ "root_render_func(context):"
+ % (self.environment.is_async and "async " or "")
+ )
self.indent()
- self.writeline('yield event')
+ self.writeline("yield event")
self.outdent()
self.outdent(1 + (not self.has_known_extends))
# at this point we now have the blocks collected and can visit them too.
for name, block in iteritems(self.blocks):
- self.writeline('%s(context, missing=missing%s):' %
- (self.func('block_' + name), envenv),
- block, 1)
+ self.writeline(
+ "%s(context, missing=missing%s):"
+ % (self.func("block_" + name), envenv),
+ block,
+ 1,
+ )
self.indent()
self.write_commons()
# It's important that we do not make this frame a child of the
# toplevel template. This would cause a variety of
# interesting issues with identifier tracking.
block_frame = Frame(eval_ctx)
- undeclared = find_undeclared(block.body, ('self', 'super'))
- if 'self' in undeclared:
- ref = block_frame.symbols.declare_parameter('self')
- self.writeline('%s = TemplateReference(context)' % ref)
- if 'super' in undeclared:
- ref = block_frame.symbols.declare_parameter('super')
- self.writeline('%s = context.super(%r, '
- 'block_%s)' % (ref, name, name))
+ undeclared = find_undeclared(block.body, ("self", "super"))
+ if "self" in undeclared:
+ ref = block_frame.symbols.declare_parameter("self")
+ self.writeline("%s = TemplateReference(context)" % ref)
+ if "super" in undeclared:
+ ref = block_frame.symbols.declare_parameter("super")
+ self.writeline("%s = context.super(%r, block_%s)" % (ref, name, name))
block_frame.symbols.analyze_node(block)
block_frame.block = name
self.enter_frame(block_frame)
@@ -800,13 +823,15 @@ class CodeGenerator(NodeVisitor):
self.leave_frame(block_frame, with_python_scope=True)
self.outdent()
- self.writeline('blocks = {%s}' % ', '.join('%r: block_%s' % (x, x)
- for x in self.blocks),
- extra=1)
+ self.writeline(
+ "blocks = {%s}" % ", ".join("%r: block_%s" % (x, x) for x in self.blocks),
+ extra=1,
+ )
# add a function that returns the debug info
- self.writeline('debug_info = %r' % '&'.join('%s=%s' % x for x
- in self.debug_info))
+ self.writeline(
+ "debug_info = %r" % "&".join("%s=%s" % x for x in self.debug_info)
+ )
def visit_Block(self, node, frame):
"""Call a block and register it for the template."""
@@ -817,7 +842,7 @@ class CodeGenerator(NodeVisitor):
if self.has_known_extends:
return
if self.extends_so_far > 0:
- self.writeline('if parent_template is None:')
+ self.writeline("if parent_template is None:")
self.indent()
level += 1
@@ -826,16 +851,22 @@ class CodeGenerator(NodeVisitor):
else:
context = self.get_context_ref()
- if supports_yield_from and not self.environment.is_async and \
- frame.buffer is None:
- self.writeline('yield from context.blocks[%r][0](%s)' % (
- node.name, context), node)
+ if (
+ supports_yield_from
+ and not self.environment.is_async
+ and frame.buffer is None
+ ):
+ self.writeline(
+ "yield from context.blocks[%r][0](%s)" % (node.name, context), node
+ )
else:
- loop = self.environment.is_async and 'async for' or 'for'
- self.writeline('%s event in context.blocks[%r][0](%s):' % (
- loop, node.name, context), node)
+ loop = self.environment.is_async and "async for" or "for"
+ self.writeline(
+ "%s event in context.blocks[%r][0](%s):" % (loop, node.name, context),
+ node,
+ )
self.indent()
- self.simple_write('event', frame)
+ self.simple_write("event", frame)
self.outdent()
self.outdent(level)
@@ -843,8 +874,7 @@ class CodeGenerator(NodeVisitor):
def visit_Extends(self, node, frame):
"""Calls the extender."""
if not frame.toplevel:
- self.fail('cannot use extend from a non top-level scope',
- node.lineno)
+ self.fail("cannot use extend from a non top-level scope", node.lineno)
# if the number of extends statements in general is zero so
# far, we don't have to add a check if something extended
@@ -856,10 +886,9 @@ class CodeGenerator(NodeVisitor):
# time too, but i welcome it not to confuse users by throwing the
# same error at different times just "because we can".
if not self.has_known_extends:
- self.writeline('if parent_template is not None:')
+ self.writeline("if parent_template is not None:")
self.indent()
- self.writeline('raise TemplateRuntimeError(%r)' %
- 'extended multiple times')
+ self.writeline("raise TemplateRuntimeError(%r)" % "extended multiple times")
# if we have a known extends already we don't need that code here
# as we know that the template execution will end here.
@@ -868,14 +897,14 @@ class CodeGenerator(NodeVisitor):
else:
self.outdent()
- self.writeline('parent_template = environment.get_template(', node)
+ self.writeline("parent_template = environment.get_template(", node)
self.visit(node.template, frame)
- self.write(', %r)' % self.name)
- self.writeline('for name, parent_block in parent_template.'
- 'blocks.%s():' % dict_item_iter)
+ self.write(", %r)" % self.name)
+ self.writeline(
+ "for name, parent_block in parent_template.blocks.%s():" % dict_item_iter
+ )
self.indent()
- self.writeline('context.blocks.setdefault(name, []).'
- 'append(parent_block)')
+ self.writeline("context.blocks.setdefault(name, []).append(parent_block)")
self.outdent()
# if this extends statement was in the root level we can take
@@ -890,52 +919,56 @@ class CodeGenerator(NodeVisitor):
def visit_Include(self, node, frame):
"""Handles includes."""
if node.ignore_missing:
- self.writeline('try:')
+ self.writeline("try:")
self.indent()
- func_name = 'get_or_select_template'
+ func_name = "get_or_select_template"
if isinstance(node.template, nodes.Const):
if isinstance(node.template.value, string_types):
- func_name = 'get_template'
+ func_name = "get_template"
elif isinstance(node.template.value, (tuple, list)):
- func_name = 'select_template'
+ func_name = "select_template"
elif isinstance(node.template, (nodes.Tuple, nodes.List)):
- func_name = 'select_template'
+ func_name = "select_template"
- self.writeline('template = environment.%s(' % func_name, node)
+ self.writeline("template = environment.%s(" % func_name, node)
self.visit(node.template, frame)
- self.write(', %r)' % self.name)
+ self.write(", %r)" % self.name)
if node.ignore_missing:
self.outdent()
- self.writeline('except TemplateNotFound:')
+ self.writeline("except TemplateNotFound:")
self.indent()
- self.writeline('pass')
+ self.writeline("pass")
self.outdent()
- self.writeline('else:')
+ self.writeline("else:")
self.indent()
skip_event_yield = False
if node.with_context:
- loop = self.environment.is_async and 'async for' or 'for'
- self.writeline('%s event in template.root_render_func('
- 'template.new_context(context.get_all(), True, '
- '%s)):' % (loop, self.dump_local_context(frame)))
+ loop = self.environment.is_async and "async for" or "for"
+ self.writeline(
+ "%s event in template.root_render_func("
+ "template.new_context(context.get_all(), True, "
+ "%s)):" % (loop, self.dump_local_context(frame))
+ )
elif self.environment.is_async:
- self.writeline('for event in (await '
- 'template._get_default_module_async())'
- '._body_stream:')
+ self.writeline(
+ "for event in (await "
+ "template._get_default_module_async())"
+ "._body_stream:"
+ )
else:
if supports_yield_from:
- self.writeline('yield from template._get_default_module()'
- '._body_stream')
+ self.writeline("yield from template._get_default_module()._body_stream")
skip_event_yield = True
else:
- self.writeline('for event in template._get_default_module()'
- '._body_stream:')
+ self.writeline(
+ "for event in template._get_default_module()._body_stream:"
+ )
if not skip_event_yield:
self.indent()
- self.simple_write('event', frame)
+ self.simple_write("event", frame)
self.outdent()
if node.ignore_missing:
@@ -943,40 +976,50 @@ class CodeGenerator(NodeVisitor):
def visit_Import(self, node, frame):
"""Visit regular imports."""
- self.writeline('%s = ' % frame.symbols.ref(node.target), node)
+ self.writeline("%s = " % frame.symbols.ref(node.target), node)
if frame.toplevel:
- self.write('context.vars[%r] = ' % node.target)
+ self.write("context.vars[%r] = " % node.target)
if self.environment.is_async:
- self.write('await ')
- self.write('environment.get_template(')
+ self.write("await ")
+ self.write("environment.get_template(")
self.visit(node.template, frame)
- self.write(', %r).' % self.name)
+ self.write(", %r)." % self.name)
if node.with_context:
- self.write('make_module%s(context.get_all(), True, %s)'
- % (self.environment.is_async and '_async' or '',
- self.dump_local_context(frame)))
+ self.write(
+ "make_module%s(context.get_all(), True, %s)"
+ % (
+ self.environment.is_async and "_async" or "",
+ self.dump_local_context(frame),
+ )
+ )
elif self.environment.is_async:
- self.write('_get_default_module_async()')
+ self.write("_get_default_module_async()")
else:
- self.write('_get_default_module()')
- if frame.toplevel and not node.target.startswith('_'):
- self.writeline('context.exported_vars.discard(%r)' % node.target)
+ self.write("_get_default_module()")
+ if frame.toplevel and not node.target.startswith("_"):
+ self.writeline("context.exported_vars.discard(%r)" % node.target)
def visit_FromImport(self, node, frame):
"""Visit named imports."""
self.newline(node)
- self.write('included_template = %senvironment.get_template('
- % (self.environment.is_async and 'await ' or ''))
+ self.write(
+ "included_template = %senvironment.get_template("
+ % (self.environment.is_async and "await " or "")
+ )
self.visit(node.template, frame)
- self.write(', %r).' % self.name)
+ self.write(", %r)." % self.name)
if node.with_context:
- self.write('make_module%s(context.get_all(), True, %s)'
- % (self.environment.is_async and '_async' or '',
- self.dump_local_context(frame)))
+ self.write(
+ "make_module%s(context.get_all(), True, %s)"
+ % (
+ self.environment.is_async and "_async" or "",
+ self.dump_local_context(frame),
+ )
+ )
elif self.environment.is_async:
- self.write('_get_default_module_async()')
+ self.write("_get_default_module_async()")
else:
- self.write('_get_default_module()')
+ self.write("_get_default_module()")
var_names = []
discarded_names = []
@@ -985,41 +1028,51 @@ class CodeGenerator(NodeVisitor):
name, alias = name
else:
alias = name
- self.writeline('%s = getattr(included_template, '
- '%r, missing)' % (frame.symbols.ref(alias), name))
- self.writeline('if %s is missing:' % frame.symbols.ref(alias))
+ self.writeline(
+ "%s = getattr(included_template, "
+ "%r, missing)" % (frame.symbols.ref(alias), name)
+ )
+ self.writeline("if %s is missing:" % frame.symbols.ref(alias))
self.indent()
- self.writeline('%s = undefined(%r %% '
- 'included_template.__name__, '
- 'name=%r)' %
- (frame.symbols.ref(alias),
- 'the template %%r (imported on %s) does '
- 'not export the requested name %s' % (
- self.position(node),
- repr(name)
- ), name))
+ self.writeline(
+ "%s = undefined(%r %% "
+ "included_template.__name__, "
+ "name=%r)"
+ % (
+ frame.symbols.ref(alias),
+ "the template %%r (imported on %s) does "
+ "not export the requested name %s"
+ % (self.position(node), repr(name)),
+ name,
+ )
+ )
self.outdent()
if frame.toplevel:
var_names.append(alias)
- if not alias.startswith('_'):
+ if not alias.startswith("_"):
discarded_names.append(alias)
if var_names:
if len(var_names) == 1:
name = var_names[0]
- self.writeline('context.vars[%r] = %s' %
- (name, frame.symbols.ref(name)))
+ self.writeline(
+ "context.vars[%r] = %s" % (name, frame.symbols.ref(name))
+ )
else:
- self.writeline('context.vars.update({%s})' % ', '.join(
- '%r: %s' % (name, frame.symbols.ref(name)) for name in var_names
- ))
+ self.writeline(
+ "context.vars.update({%s})"
+ % ", ".join(
+ "%r: %s" % (name, frame.symbols.ref(name)) for name in var_names
+ )
+ )
if discarded_names:
if len(discarded_names) == 1:
- self.writeline('context.exported_vars.discard(%r)' %
- discarded_names[0])
+ self.writeline("context.exported_vars.discard(%r)" % discarded_names[0])
else:
- self.writeline('context.exported_vars.difference_'
- 'update((%s))' % ', '.join(imap(repr, discarded_names)))
+ self.writeline(
+ "context.exported_vars.difference_"
+ "update((%s))" % ", ".join(imap(repr, discarded_names))
+ )
def visit_For(self, node, frame):
loop_frame = frame.inner()
@@ -1029,35 +1082,35 @@ class CodeGenerator(NodeVisitor):
# try to figure out if we have an extended loop. An extended loop
# is necessary if the loop is in recursive mode if the special loop
# variable is accessed in the body.
- extended_loop = node.recursive or 'loop' in \
- find_undeclared(node.iter_child_nodes(
- only=('body',)), ('loop',))
+ extended_loop = node.recursive or "loop" in find_undeclared(
+ node.iter_child_nodes(only=("body",)), ("loop",)
+ )
loop_ref = None
if extended_loop:
- loop_ref = loop_frame.symbols.declare_parameter('loop')
+ loop_ref = loop_frame.symbols.declare_parameter("loop")
- loop_frame.symbols.analyze_node(node, for_branch='body')
+ loop_frame.symbols.analyze_node(node, for_branch="body")
if node.else_:
- else_frame.symbols.analyze_node(node, for_branch='else')
+ else_frame.symbols.analyze_node(node, for_branch="else")
if node.test:
loop_filter_func = self.temporary_identifier()
- test_frame.symbols.analyze_node(node, for_branch='test')
- self.writeline('%s(fiter):' % self.func(loop_filter_func), node.test)
+ test_frame.symbols.analyze_node(node, for_branch="test")
+ self.writeline("%s(fiter):" % self.func(loop_filter_func), node.test)
self.indent()
self.enter_frame(test_frame)
- self.writeline(self.environment.is_async and 'async for ' or 'for ')
+ self.writeline(self.environment.is_async and "async for " or "for ")
self.visit(node.target, loop_frame)
- self.write(' in ')
- self.write(self.environment.is_async and 'auto_aiter(fiter)' or 'fiter')
- self.write(':')
+ self.write(" in ")
+ self.write(self.environment.is_async and "auto_aiter(fiter)" or "fiter")
+ self.write(":")
self.indent()
- self.writeline('if ', node.test)
+ self.writeline("if ", node.test)
self.visit(node.test, test_frame)
- self.write(':')
+ self.write(":")
self.indent()
- self.writeline('yield ')
+ self.writeline("yield ")
self.visit(node.target, loop_frame)
self.outdent(3)
self.leave_frame(test_frame, with_python_scope=True)
@@ -1066,8 +1119,9 @@ class CodeGenerator(NodeVisitor):
# variables at that point. Because loops can be nested but the loop
# variable is a special one we have to enforce aliasing for it.
if node.recursive:
- self.writeline('%s(reciter, loop_render_func, depth=0):' %
- self.func('loop'), node)
+ self.writeline(
+ "%s(reciter, loop_render_func, depth=0):" % self.func("loop"), node
+ )
self.indent()
self.buffer(loop_frame)
@@ -1077,57 +1131,60 @@ class CodeGenerator(NodeVisitor):
# make sure the loop variable is a special one and raise a template
# assertion error if a loop tries to write to loop
if extended_loop:
- self.writeline('%s = missing' % loop_ref)
+ self.writeline("%s = missing" % loop_ref)
for name in node.find_all(nodes.Name):
- if name.ctx == 'store' and name.name == 'loop':
- self.fail('Can\'t assign to special loop variable '
- 'in for-loop target', name.lineno)
+ if name.ctx == "store" and name.name == "loop":
+ self.fail(
+ "Can't assign to special loop variable in for-loop target",
+ name.lineno,
+ )
if node.else_:
iteration_indicator = self.temporary_identifier()
- self.writeline('%s = 1' % iteration_indicator)
+ self.writeline("%s = 1" % iteration_indicator)
- self.writeline(self.environment.is_async and 'async for ' or 'for ', node)
+ self.writeline(self.environment.is_async and "async for " or "for ", node)
self.visit(node.target, loop_frame)
if extended_loop:
if self.environment.is_async:
- self.write(', %s in await make_async_loop_context(' % loop_ref)
+ self.write(", %s in AsyncLoopContext(" % loop_ref)
else:
- self.write(', %s in LoopContext(' % loop_ref)
+ self.write(", %s in LoopContext(" % loop_ref)
else:
- self.write(' in ')
+ self.write(" in ")
if node.test:
- self.write('%s(' % loop_filter_func)
+ self.write("%s(" % loop_filter_func)
if node.recursive:
- self.write('reciter')
+ self.write("reciter")
else:
if self.environment.is_async and not extended_loop:
- self.write('auto_aiter(')
+ self.write("auto_aiter(")
self.visit(node.iter, frame)
if self.environment.is_async and not extended_loop:
- self.write(')')
+ self.write(")")
if node.test:
- self.write(')')
+ self.write(")")
if node.recursive:
- self.write(', undefined, loop_render_func, depth):')
+ self.write(", undefined, loop_render_func, depth):")
else:
- self.write(extended_loop and ', undefined):' or ':')
+ self.write(extended_loop and ", undefined):" or ":")
self.indent()
self.enter_frame(loop_frame)
self.blockvisit(node.body, loop_frame)
if node.else_:
- self.writeline('%s = 0' % iteration_indicator)
+ self.writeline("%s = 0" % iteration_indicator)
self.outdent()
- self.leave_frame(loop_frame, with_python_scope=node.recursive
- and not node.else_)
+ self.leave_frame(
+ loop_frame, with_python_scope=node.recursive and not node.else_
+ )
if node.else_:
- self.writeline('if %s:' % iteration_indicator)
+ self.writeline("if %s:" % iteration_indicator)
self.indent()
self.enter_frame(else_frame)
self.blockvisit(node.else_, else_frame)
@@ -1141,33 +1198,33 @@ class CodeGenerator(NodeVisitor):
self.outdent()
self.start_write(frame, node)
if self.environment.is_async:
- self.write('await ')
- self.write('loop(')
+ self.write("await ")
+ self.write("loop(")
if self.environment.is_async:
- self.write('auto_aiter(')
+ self.write("auto_aiter(")
self.visit(node.iter, frame)
if self.environment.is_async:
- self.write(')')
- self.write(', loop)')
+ self.write(")")
+ self.write(", loop)")
self.end_write(frame)
def visit_If(self, node, frame):
if_frame = frame.soft()
- self.writeline('if ', node)
+ self.writeline("if ", node)
self.visit(node.test, if_frame)
- self.write(':')
+ self.write(":")
self.indent()
self.blockvisit(node.body, if_frame)
self.outdent()
for elif_ in node.elif_:
- self.writeline('elif ', elif_)
+ self.writeline("elif ", elif_)
self.visit(elif_.test, if_frame)
- self.write(':')
+ self.write(":")
self.indent()
self.blockvisit(elif_.body, if_frame)
self.outdent()
if node.else_:
- self.writeline('else:')
+ self.writeline("else:")
self.indent()
self.blockvisit(node.else_, if_frame)
self.outdent()
@@ -1176,16 +1233,15 @@ class CodeGenerator(NodeVisitor):
macro_frame, macro_ref = self.macro_body(node, frame)
self.newline()
if frame.toplevel:
- if not node.name.startswith('_'):
- self.write('context.exported_vars.add(%r)' % node.name)
- ref = frame.symbols.ref(node.name)
- self.writeline('context.vars[%r] = ' % node.name)
- self.write('%s = ' % frame.symbols.ref(node.name))
+ if not node.name.startswith("_"):
+ self.write("context.exported_vars.add(%r)" % node.name)
+ self.writeline("context.vars[%r] = " % node.name)
+ self.write("%s = " % frame.symbols.ref(node.name))
self.macro_def(macro_ref, macro_frame)
def visit_CallBlock(self, node, frame):
call_frame, macro_ref = self.macro_body(node, frame)
- self.writeline('caller = ')
+ self.writeline("caller = ")
self.macro_def(macro_ref, call_frame)
self.start_write(frame, node)
self.visit_Call(node.call, frame, forward_caller=True)
@@ -1206,10 +1262,10 @@ class CodeGenerator(NodeVisitor):
with_frame = frame.inner()
with_frame.symbols.analyze_node(node)
self.enter_frame(with_frame)
- for idx, (target, expr) in enumerate(izip(node.targets, node.values)):
+ for target, expr in izip(node.targets, node.values):
self.newline()
self.visit(target, with_frame)
- self.write(' = ')
+ self.write(" = ")
self.visit(expr, frame)
self.blockvisit(node.body, with_frame)
self.leave_frame(with_frame)
@@ -1218,156 +1274,187 @@ class CodeGenerator(NodeVisitor):
self.newline(node)
self.visit(node.node, frame)
- def visit_Output(self, node, frame):
- # if we have a known extends statement, we don't output anything
- # if we are in a require_output_check section
- if self.has_known_extends and frame.require_output_check:
- return
+ _FinalizeInfo = namedtuple("_FinalizeInfo", ("const", "src"))
+ #: The default finalize function if the environment isn't configured
+ #: with one. Or if the environment has one, this is called on that
+ #: function's output for constants.
+ _default_finalize = text_type
+ _finalize = None
+
+ def _make_finalize(self):
+ """Build the finalize function to be used on constants and at
+ runtime. Cached so it's only created once for all output nodes.
+
+ Returns a ``namedtuple`` with the following attributes:
+
+ ``const``
+ A function to finalize constant data at compile time.
+
+ ``src``
+ Source code to output around nodes to be evaluated at
+ runtime.
+ """
+ if self._finalize is not None:
+ return self._finalize
+
+ finalize = default = self._default_finalize
+ src = None
- allow_constant_finalize = True
if self.environment.finalize:
- func = self.environment.finalize
- if getattr(func, 'contextfunction', False) or \
- getattr(func, 'evalcontextfunction', False):
- allow_constant_finalize = False
- elif getattr(func, 'environmentfunction', False):
- finalize = lambda x: text_type(
- self.environment.finalize(self.environment, x))
- else:
- finalize = lambda x: text_type(self.environment.finalize(x))
+ src = "environment.finalize("
+ env_finalize = self.environment.finalize
+
+ def finalize(value):
+ return default(env_finalize(value))
+
+ if getattr(env_finalize, "contextfunction", False) is True:
+ src += "context, "
+ finalize = None # noqa: F811
+ elif getattr(env_finalize, "evalcontextfunction", False) is True:
+ src += "context.eval_ctx, "
+ finalize = None
+ elif getattr(env_finalize, "environmentfunction", False) is True:
+ src += "environment, "
+
+ def finalize(value):
+ return default(env_finalize(self.environment, value))
+
+ self._finalize = self._FinalizeInfo(finalize, src)
+ return self._finalize
+
+ def _output_const_repr(self, group):
+ """Given a group of constant values converted from ``Output``
+ child nodes, produce a string to write to the template module
+ source.
+ """
+ return repr(concat(group))
+
+ def _output_child_to_const(self, node, frame, finalize):
+ """Try to optimize a child of an ``Output`` node by trying to
+ convert it to constant, finalized data at compile time.
+
+ If :exc:`Impossible` is raised, the node is not constant and
+ will be evaluated at runtime. Any other exception will also be
+ evaluated at runtime for easier debugging.
+ """
+ const = node.as_const(frame.eval_ctx)
+
+ if frame.eval_ctx.autoescape:
+ const = escape(const)
+
+ # Template data doesn't go through finalize.
+ if isinstance(node, nodes.TemplateData):
+ return text_type(const)
+
+ return finalize.const(const)
+
+ def _output_child_pre(self, node, frame, finalize):
+ """Output extra source code before visiting a child of an
+ ``Output`` node.
+ """
+ if frame.eval_ctx.volatile:
+ self.write("(escape if context.eval_ctx.autoescape else to_string)(")
+ elif frame.eval_ctx.autoescape:
+ self.write("escape(")
else:
- finalize = text_type
+ self.write("to_string(")
+
+ if finalize.src is not None:
+ self.write(finalize.src)
+
+ def _output_child_post(self, node, frame, finalize):
+ """Output extra source code after visiting a child of an
+ ``Output`` node.
+ """
+ self.write(")")
+
+ if finalize.src is not None:
+ self.write(")")
- # if we are inside a frame that requires output checking, we do so
- outdent_later = False
+ def visit_Output(self, node, frame):
+ # If an extends is active, don't render outside a block.
if frame.require_output_check:
- self.writeline('if parent_template is None:')
+ # A top-level extends is known to exist at compile time.
+ if self.has_known_extends:
+ return
+
+ self.writeline("if parent_template is None:")
self.indent()
- outdent_later = True
- # try to evaluate as many chunks as possible into a static
- # string at compile time.
+ finalize = self._make_finalize()
body = []
+
+ # Evaluate constants at compile time if possible. Each item in
+ # body will be either a list of static data or a node to be
+ # evaluated at runtime.
for child in node.nodes:
try:
- if not allow_constant_finalize:
+ if not (
+ # If the finalize function requires runtime context,
+ # constants can't be evaluated at compile time.
+ finalize.const
+ # Unless it's basic template data that won't be
+ # finalized anyway.
+ or isinstance(child, nodes.TemplateData)
+ ):
raise nodes.Impossible()
- const = child.as_const(frame.eval_ctx)
- except nodes.Impossible:
- body.append(child)
- continue
- # the frame can't be volatile here, becaus otherwise the
- # as_const() function would raise an Impossible exception
- # at that point.
- try:
- if frame.eval_ctx.autoescape:
- if hasattr(const, '__html__'):
- const = const.__html__()
- else:
- const = escape(const)
- const = finalize(const)
- except Exception:
- # if something goes wrong here we evaluate the node
- # at runtime for easier debugging
+
+ const = self._output_child_to_const(child, frame, finalize)
+ except (nodes.Impossible, Exception):
+ # The node was not constant and needs to be evaluated at
+ # runtime. Or another error was raised, which is easier
+ # to debug at runtime.
body.append(child)
continue
+
if body and isinstance(body[-1], list):
body[-1].append(const)
else:
body.append([const])
- # if we have less than 3 nodes or a buffer we yield or extend/append
- if len(body) < 3 or frame.buffer is not None:
- if frame.buffer is not None:
- # for one item we append, for more we extend
- if len(body) == 1:
- self.writeline('%s.append(' % frame.buffer)
+ if frame.buffer is not None:
+ if len(body) == 1:
+ self.writeline("%s.append(" % frame.buffer)
+ else:
+ self.writeline("%s.extend((" % frame.buffer)
+
+ self.indent()
+
+ for item in body:
+ if isinstance(item, list):
+ # A group of constant data to join and output.
+ val = self._output_const_repr(item)
+
+ if frame.buffer is None:
+ self.writeline("yield " + val)
else:
- self.writeline('%s.extend((' % frame.buffer)
- self.indent()
- for item in body:
- if isinstance(item, list):
- val = repr(concat(item))
- if frame.buffer is None:
- self.writeline('yield ' + val)
- else:
- self.writeline(val + ',')
+ self.writeline(val + ",")
+ else:
+ if frame.buffer is None:
+ self.writeline("yield ", item)
else:
- if frame.buffer is None:
- self.writeline('yield ', item)
- else:
- self.newline(item)
- close = 1
- if frame.eval_ctx.volatile:
- self.write('(escape if context.eval_ctx.autoescape'
- ' else to_string)(')
- elif frame.eval_ctx.autoescape:
- self.write('escape(')
- else:
- self.write('to_string(')
- if self.environment.finalize is not None:
- self.write('environment.finalize(')
- if getattr(self.environment.finalize,
- "contextfunction", False):
- self.write('context, ')
- close += 1
- self.visit(item, frame)
- self.write(')' * close)
- if frame.buffer is not None:
- self.write(',')
- if frame.buffer is not None:
- # close the open parentheses
- self.outdent()
- self.writeline(len(body) == 1 and ')' or '))')
+ self.newline(item)
- # otherwise we create a format string as this is faster in that case
- else:
- format = []
- arguments = []
- for item in body:
- if isinstance(item, list):
- format.append(concat(item).replace('%', '%%'))
- else:
- format.append('%s')
- arguments.append(item)
- self.writeline('yield ')
- self.write(repr(concat(format)) + ' % (')
- self.indent()
- for argument in arguments:
- self.newline(argument)
- close = 0
- if frame.eval_ctx.volatile:
- self.write('(escape if context.eval_ctx.autoescape else'
- ' to_string)(')
- close += 1
- elif frame.eval_ctx.autoescape:
- self.write('escape(')
- close += 1
- if self.environment.finalize is not None:
- self.write('environment.finalize(')
- if getattr(self.environment.finalize,
- 'contextfunction', False):
- self.write('context, ')
- elif getattr(self.environment.finalize,
- 'evalcontextfunction', False):
- self.write('context.eval_ctx, ')
- elif getattr(self.environment.finalize,
- 'environmentfunction', False):
- self.write('environment, ')
- close += 1
- self.visit(argument, frame)
- self.write(')' * close + ', ')
+ # A node to be evaluated at runtime.
+ self._output_child_pre(item, frame, finalize)
+ self.visit(item, frame)
+ self._output_child_post(item, frame, finalize)
+
+ if frame.buffer is not None:
+ self.write(",")
+
+ if frame.buffer is not None:
self.outdent()
- self.writeline(')')
+ self.writeline(")" if len(body) == 1 else "))")
- if outdent_later:
+ if frame.require_output_check:
self.outdent()
def visit_Assign(self, node, frame):
self.push_assign_tracking()
self.newline(node)
self.visit(node.target, frame)
- self.write(' = ')
+ self.write(" = ")
self.visit(node.node, frame)
self.pop_assign_tracking(frame)
@@ -1384,20 +1471,19 @@ class CodeGenerator(NodeVisitor):
self.blockvisit(node.body, block_frame)
self.newline(node)
self.visit(node.target, frame)
- self.write(' = (Markup if context.eval_ctx.autoescape '
- 'else identity)(')
+ self.write(" = (Markup if context.eval_ctx.autoescape else identity)(")
if node.filter is not None:
self.visit_Filter(node.filter, block_frame)
else:
- self.write('concat(%s)' % block_frame.buffer)
- self.write(')')
+ self.write("concat(%s)" % block_frame.buffer)
+ self.write(")")
self.pop_assign_tracking(frame)
self.leave_frame(block_frame)
# -- Expression Visitors
def visit_Name(self, node, frame):
- if node.ctx == 'store' and frame.toplevel:
+ if node.ctx == "store" and frame.toplevel:
if self._assign_stack:
self._assign_stack[-1].add(node.name)
ref = frame.symbols.ref(node.name)
@@ -1405,12 +1491,17 @@ class CodeGenerator(NodeVisitor):
# If we are looking up a variable we might have to deal with the
# case where it's undefined. We can skip that case if the load
# instruction indicates a parameter which are always defined.
- if node.ctx == 'load':
+ if node.ctx == "load":
load = frame.symbols.find_load(ref)
- if not (load is not None and load[0] == VAR_LOAD_PARAMETER and \
- not self.parameter_is_undeclared(ref)):
- self.write('(undefined(name=%r) if %s is missing else %s)' %
- (node.name, ref, ref))
+ if not (
+ load is not None
+ and load[0] == VAR_LOAD_PARAMETER
+ and not self.parameter_is_undeclared(ref)
+ ):
+ self.write(
+ "(undefined(name=%r) if %s is missing else %s)"
+ % (node.name, ref, ref)
+ )
return
self.write(ref)
@@ -1420,12 +1511,14 @@ class CodeGenerator(NodeVisitor):
# `foo.bar` notation they will be parsed as a normal attribute access
# when used anywhere but in a `set` context
ref = frame.symbols.ref(node.name)
- self.writeline('if not isinstance(%s, Namespace):' % ref)
+ self.writeline("if not isinstance(%s, Namespace):" % ref)
self.indent()
- self.writeline('raise TemplateRuntimeError(%r)' %
- 'cannot assign attribute on non-namespace object')
+ self.writeline(
+ "raise TemplateRuntimeError(%r)"
+ % "cannot assign attribute on non-namespace object"
+ )
self.outdent()
- self.writeline('%s[%r]' % (ref, node.attr))
+ self.writeline("%s[%r]" % (ref, node.attr))
def visit_Const(self, node, frame):
val = node.as_const(frame.eval_ctx)
@@ -1438,230 +1531,256 @@ class CodeGenerator(NodeVisitor):
try:
self.write(repr(node.as_const(frame.eval_ctx)))
except nodes.Impossible:
- self.write('(Markup if context.eval_ctx.autoescape else identity)(%r)'
- % node.data)
+ self.write(
+ "(Markup if context.eval_ctx.autoescape else identity)(%r)" % node.data
+ )
def visit_Tuple(self, node, frame):
- self.write('(')
+ self.write("(")
idx = -1
for idx, item in enumerate(node.items):
if idx:
- self.write(', ')
+ self.write(", ")
self.visit(item, frame)
- self.write(idx == 0 and ',)' or ')')
+ self.write(idx == 0 and ",)" or ")")
def visit_List(self, node, frame):
- self.write('[')
+ self.write("[")
for idx, item in enumerate(node.items):
if idx:
- self.write(', ')
+ self.write(", ")
self.visit(item, frame)
- self.write(']')
+ self.write("]")
def visit_Dict(self, node, frame):
- self.write('{')
+ self.write("{")
for idx, item in enumerate(node.items):
if idx:
- self.write(', ')
+ self.write(", ")
self.visit(item.key, frame)
- self.write(': ')
+ self.write(": ")
self.visit(item.value, frame)
- self.write('}')
+ self.write("}")
- def binop(operator, interceptable=True):
+ def binop(operator, interceptable=True): # noqa: B902
@optimizeconst
def visitor(self, node, frame):
- if self.environment.sandboxed and \
- operator in self.environment.intercepted_binops:
- self.write('environment.call_binop(context, %r, ' % operator)
+ if (
+ self.environment.sandboxed
+ and operator in self.environment.intercepted_binops
+ ):
+ self.write("environment.call_binop(context, %r, " % operator)
self.visit(node.left, frame)
- self.write(', ')
+ self.write(", ")
self.visit(node.right, frame)
else:
- self.write('(')
+ self.write("(")
self.visit(node.left, frame)
- self.write(' %s ' % operator)
+ self.write(" %s " % operator)
self.visit(node.right, frame)
- self.write(')')
+ self.write(")")
+
return visitor
- def uaop(operator, interceptable=True):
+ def uaop(operator, interceptable=True): # noqa: B902
@optimizeconst
def visitor(self, node, frame):
- if self.environment.sandboxed and \
- operator in self.environment.intercepted_unops:
- self.write('environment.call_unop(context, %r, ' % operator)
+ if (
+ self.environment.sandboxed
+ and operator in self.environment.intercepted_unops
+ ):
+ self.write("environment.call_unop(context, %r, " % operator)
self.visit(node.node, frame)
else:
- self.write('(' + operator)
+ self.write("(" + operator)
self.visit(node.node, frame)
- self.write(')')
+ self.write(")")
+
return visitor
- visit_Add = binop('+')
- visit_Sub = binop('-')
- visit_Mul = binop('*')
- visit_Div = binop('/')
- visit_FloorDiv = binop('//')
- visit_Pow = binop('**')
- visit_Mod = binop('%')
- visit_And = binop('and', interceptable=False)
- visit_Or = binop('or', interceptable=False)
- visit_Pos = uaop('+')
- visit_Neg = uaop('-')
- visit_Not = uaop('not ', interceptable=False)
+ visit_Add = binop("+")
+ visit_Sub = binop("-")
+ visit_Mul = binop("*")
+ visit_Div = binop("/")
+ visit_FloorDiv = binop("//")
+ visit_Pow = binop("**")
+ visit_Mod = binop("%")
+ visit_And = binop("and", interceptable=False)
+ visit_Or = binop("or", interceptable=False)
+ visit_Pos = uaop("+")
+ visit_Neg = uaop("-")
+ visit_Not = uaop("not ", interceptable=False)
del binop, uaop
@optimizeconst
def visit_Concat(self, node, frame):
if frame.eval_ctx.volatile:
- func_name = '(context.eval_ctx.volatile and' \
- ' markup_join or unicode_join)'
+ func_name = "(context.eval_ctx.volatile and markup_join or unicode_join)"
elif frame.eval_ctx.autoescape:
- func_name = 'markup_join'
+ func_name = "markup_join"
else:
- func_name = 'unicode_join'
- self.write('%s((' % func_name)
+ func_name = "unicode_join"
+ self.write("%s((" % func_name)
for arg in node.nodes:
self.visit(arg, frame)
- self.write(', ')
- self.write('))')
+ self.write(", ")
+ self.write("))")
@optimizeconst
def visit_Compare(self, node, frame):
+ self.write("(")
self.visit(node.expr, frame)
for op in node.ops:
self.visit(op, frame)
+ self.write(")")
def visit_Operand(self, node, frame):
- self.write(' %s ' % operators[node.op])
+ self.write(" %s " % operators[node.op])
self.visit(node.expr, frame)
@optimizeconst
def visit_Getattr(self, node, frame):
- self.write('environment.getattr(')
+ if self.environment.is_async:
+ self.write("(await auto_await(")
+
+ self.write("environment.getattr(")
self.visit(node.node, frame)
- self.write(', %r)' % node.attr)
+ self.write(", %r)" % node.attr)
+
+ if self.environment.is_async:
+ self.write("))")
@optimizeconst
def visit_Getitem(self, node, frame):
# slices bypass the environment getitem method.
if isinstance(node.arg, nodes.Slice):
self.visit(node.node, frame)
- self.write('[')
+ self.write("[")
self.visit(node.arg, frame)
- self.write(']')
+ self.write("]")
else:
- self.write('environment.getitem(')
+ if self.environment.is_async:
+ self.write("(await auto_await(")
+
+ self.write("environment.getitem(")
self.visit(node.node, frame)
- self.write(', ')
+ self.write(", ")
self.visit(node.arg, frame)
- self.write(')')
+ self.write(")")
+
+ if self.environment.is_async:
+ self.write("))")
def visit_Slice(self, node, frame):
if node.start is not None:
self.visit(node.start, frame)
- self.write(':')
+ self.write(":")
if node.stop is not None:
self.visit(node.stop, frame)
if node.step is not None:
- self.write(':')
+ self.write(":")
self.visit(node.step, frame)
@optimizeconst
def visit_Filter(self, node, frame):
if self.environment.is_async:
- self.write('await auto_await(')
- self.write(self.filters[node.name] + '(')
+ self.write("await auto_await(")
+ self.write(self.filters[node.name] + "(")
func = self.environment.filters.get(node.name)
if func is None:
- self.fail('no filter named %r' % node.name, node.lineno)
- if getattr(func, 'contextfilter', False):
- self.write('context, ')
- elif getattr(func, 'evalcontextfilter', False):
- self.write('context.eval_ctx, ')
- elif getattr(func, 'environmentfilter', False):
- self.write('environment, ')
+ self.fail("no filter named %r" % node.name, node.lineno)
+ if getattr(func, "contextfilter", False) is True:
+ self.write("context, ")
+ elif getattr(func, "evalcontextfilter", False) is True:
+ self.write("context.eval_ctx, ")
+ elif getattr(func, "environmentfilter", False) is True:
+ self.write("environment, ")
# if the filter node is None we are inside a filter block
# and want to write to the current buffer
if node.node is not None:
self.visit(node.node, frame)
elif frame.eval_ctx.volatile:
- self.write('(context.eval_ctx.autoescape and'
- ' Markup(concat(%s)) or concat(%s))' %
- (frame.buffer, frame.buffer))
+ self.write(
+ "(context.eval_ctx.autoescape and"
+ " Markup(concat(%s)) or concat(%s))" % (frame.buffer, frame.buffer)
+ )
elif frame.eval_ctx.autoescape:
- self.write('Markup(concat(%s))' % frame.buffer)
+ self.write("Markup(concat(%s))" % frame.buffer)
else:
- self.write('concat(%s)' % frame.buffer)
+ self.write("concat(%s)" % frame.buffer)
self.signature(node, frame)
- self.write(')')
+ self.write(")")
if self.environment.is_async:
- self.write(')')
+ self.write(")")
@optimizeconst
def visit_Test(self, node, frame):
- self.write(self.tests[node.name] + '(')
+ self.write(self.tests[node.name] + "(")
if node.name not in self.environment.tests:
- self.fail('no test named %r' % node.name, node.lineno)
+ self.fail("no test named %r" % node.name, node.lineno)
self.visit(node.node, frame)
self.signature(node, frame)
- self.write(')')
+ self.write(")")
@optimizeconst
def visit_CondExpr(self, node, frame):
def write_expr2():
if node.expr2 is not None:
return self.visit(node.expr2, frame)
- self.write('undefined(%r)' % ('the inline if-'
- 'expression on %s evaluated to false and '
- 'no else section was defined.' % self.position(node)))
-
- self.write('(')
+ self.write(
+ "cond_expr_undefined(%r)"
+ % (
+ "the inline if-"
+ "expression on %s evaluated to false and "
+ "no else section was defined." % self.position(node)
+ )
+ )
+
+ self.write("(")
self.visit(node.expr1, frame)
- self.write(' if ')
+ self.write(" if ")
self.visit(node.test, frame)
- self.write(' else ')
+ self.write(" else ")
write_expr2()
- self.write(')')
+ self.write(")")
@optimizeconst
def visit_Call(self, node, frame, forward_caller=False):
if self.environment.is_async:
- self.write('await auto_await(')
+ self.write("await auto_await(")
if self.environment.sandboxed:
- self.write('environment.call(context, ')
+ self.write("environment.call(context, ")
else:
- self.write('context.call(')
+ self.write("context.call(")
self.visit(node.node, frame)
- extra_kwargs = forward_caller and {'caller': 'caller'} or None
+ extra_kwargs = forward_caller and {"caller": "caller"} or None
self.signature(node, frame, extra_kwargs)
- self.write(')')
+ self.write(")")
if self.environment.is_async:
- self.write(')')
+ self.write(")")
def visit_Keyword(self, node, frame):
- self.write(node.key + '=')
+ self.write(node.key + "=")
self.visit(node.value, frame)
# -- Unused nodes for extensions
def visit_MarkSafe(self, node, frame):
- self.write('Markup(')
+ self.write("Markup(")
self.visit(node.expr, frame)
- self.write(')')
+ self.write(")")
def visit_MarkSafeIfAutoescape(self, node, frame):
- self.write('(context.eval_ctx.autoescape and Markup or identity)(')
+ self.write("(context.eval_ctx.autoescape and Markup or identity)(")
self.visit(node.expr, frame)
- self.write(')')
+ self.write(")")
def visit_EnvironmentAttribute(self, node, frame):
- self.write('environment.' + node.name)
+ self.write("environment." + node.name)
def visit_ExtensionAttribute(self, node, frame):
- self.write('environment.extensions[%r].%s' % (node.identifier, node.name))
+ self.write("environment.extensions[%r].%s" % (node.identifier, node.name))
def visit_ImportedName(self, node, frame):
self.write(self.import_aliases[node.importname])
@@ -1670,13 +1789,16 @@ class CodeGenerator(NodeVisitor):
self.write(node.name)
def visit_ContextReference(self, node, frame):
- self.write('context')
+ self.write("context")
+
+ def visit_DerivedContextReference(self, node, frame):
+ self.write(self.derive_context(frame))
def visit_Continue(self, node, frame):
- self.writeline('continue', node)
+ self.writeline("continue", node)
def visit_Break(self, node, frame):
- self.writeline('break', node)
+ self.writeline("break", node)
def visit_Scope(self, node, frame):
scope_frame = frame.inner()
@@ -1687,8 +1809,8 @@ class CodeGenerator(NodeVisitor):
def visit_OverlayScope(self, node, frame):
ctx = self.temporary_identifier()
- self.writeline('%s = %s' % (ctx, self.derive_context(frame)))
- self.writeline('%s.vars = ' % ctx)
+ self.writeline("%s = %s" % (ctx, self.derive_context(frame)))
+ self.writeline("%s.vars = " % ctx)
self.visit(node.context, frame)
self.push_context_reference(ctx)
@@ -1701,7 +1823,7 @@ class CodeGenerator(NodeVisitor):
def visit_EvalContextModifier(self, node, frame):
for keyword in node.options:
- self.writeline('context.eval_ctx.%s = ' % keyword.key)
+ self.writeline("context.eval_ctx.%s = " % keyword.key)
self.visit(keyword.value, frame)
try:
val = keyword.value.as_const(frame.eval_ctx)
@@ -1713,9 +1835,9 @@ class CodeGenerator(NodeVisitor):
def visit_ScopedEvalContextModifier(self, node, frame):
old_ctx_name = self.temporary_identifier()
saved_ctx = frame.eval_ctx.save()
- self.writeline('%s = context.eval_ctx.save()' % old_ctx_name)
+ self.writeline("%s = context.eval_ctx.save()" % old_ctx_name)
self.visit_EvalContextModifier(node, frame)
for child in node.body:
self.visit(child, frame)
frame.eval_ctx.revert(saved_ctx)
- self.writeline('context.eval_ctx.revert(%s)' % old_ctx_name)
+ self.writeline("context.eval_ctx.revert(%s)" % old_ctx_name)
diff --git a/lib/spack/external/jinja2/constants.py b/lib/spack/external/jinja2/constants.py
index 11efd1ed15..bf7f2ca721 100644
--- a/lib/spack/external/jinja2/constants.py
+++ b/lib/spack/external/jinja2/constants.py
@@ -1,17 +1,6 @@
# -*- coding: utf-8 -*-
-"""
- jinja.constants
- ~~~~~~~~~~~~~~~
-
- Various constants.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
-"""
-
-
#: list of lorem ipsum words used by the lipsum() helper function
-LOREM_IPSUM_WORDS = u'''\
+LOREM_IPSUM_WORDS = u"""\
a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
auctor augue bibendum blandit class commodo condimentum congue consectetuer
consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
@@ -29,4 +18,4 @@ ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
-viverra volutpat vulputate'''
+viverra volutpat vulputate"""
diff --git a/lib/spack/external/jinja2/debug.py b/lib/spack/external/jinja2/debug.py
index b61139f0cd..5d8aec31d0 100644
--- a/lib/spack/external/jinja2/debug.py
+++ b/lib/spack/external/jinja2/debug.py
@@ -1,372 +1,268 @@
-# -*- coding: utf-8 -*-
-"""
- jinja2.debug
- ~~~~~~~~~~~~
-
- Implements the debug interface for Jinja. This module does some pretty
- ugly stuff with the Python traceback system in order to achieve tracebacks
- with correct line numbers, locals and contents.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
-"""
import sys
-import traceback
-from types import TracebackType, CodeType
-from jinja2.utils import missing, internal_code
-from jinja2.exceptions import TemplateSyntaxError
-from jinja2._compat import iteritems, reraise, PY2
+from types import CodeType
-# on pypy we can take advantage of transparent proxies
-try:
- from __pypy__ import tproxy
-except ImportError:
- tproxy = None
+from . import TemplateSyntaxError
+from ._compat import PYPY
+from .utils import internal_code
+from .utils import missing
-# how does the raise helper look like?
-try:
- exec("raise TypeError, 'foo'")
-except SyntaxError:
- raise_helper = 'raise __jinja_exception__[1]'
-except TypeError:
- raise_helper = 'raise __jinja_exception__[0], __jinja_exception__[1]'
+def rewrite_traceback_stack(source=None):
+ """Rewrite the current exception to replace any tracebacks from
+ within compiled template code with tracebacks that look like they
+ came from the template source.
+ This must be called within an ``except`` block.
-class TracebackFrameProxy(object):
- """Proxies a traceback frame."""
+ The active exception is read with :func:`sys.exc_info`, so this
+ takes no ``exc_info`` argument; the current exception is used.
+ :param source: For ``TemplateSyntaxError``, the original source if
+ known.
+ :return: A :meth:`sys.exc_info` tuple that can be re-raised.
+ """
+ exc_type, exc_value, tb = sys.exc_info()
- def __init__(self, tb):
- self.tb = tb
- self._tb_next = None
+ if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated:
+ exc_value.translated = True
+ exc_value.source = source
- @property
- def tb_next(self):
- return self._tb_next
+ try:
+ # Remove the old traceback on Python 3, otherwise the frames
+ # from the compiler still show up.
+ exc_value.with_traceback(None)
+ except AttributeError:
+ pass
- def set_next(self, next):
- if tb_set_next is not None:
- try:
- tb_set_next(self.tb, next and next.tb or None)
- except Exception:
- # this function can fail due to all the hackery it does
- # on various python implementations. We just catch errors
- # down and ignore them if necessary.
- pass
- self._tb_next = next
-
- @property
- def is_jinja_frame(self):
- return '__jinja_template__' in self.tb.tb_frame.f_globals
-
- def __getattr__(self, name):
- return getattr(self.tb, name)
-
-
-def make_frame_proxy(frame):
- proxy = TracebackFrameProxy(frame)
- if tproxy is None:
- return proxy
- def operation_handler(operation, *args, **kwargs):
- if operation in ('__getattribute__', '__getattr__'):
- return getattr(proxy, args[0])
- elif operation == '__setattr__':
- proxy.__setattr__(*args, **kwargs)
- else:
- return getattr(proxy, operation)(*args, **kwargs)
- return tproxy(TracebackType, operation_handler)
-
-
-class ProcessedTraceback(object):
- """Holds a Jinja preprocessed traceback for printing or reraising."""
-
- def __init__(self, exc_type, exc_value, frames):
- assert frames, 'no frames for this traceback?'
- self.exc_type = exc_type
- self.exc_value = exc_value
- self.frames = frames
-
- # newly concatenate the frames (which are proxies)
- prev_tb = None
- for tb in self.frames:
- if prev_tb is not None:
- prev_tb.set_next(tb)
- prev_tb = tb
- prev_tb.set_next(None)
-
- def render_as_text(self, limit=None):
- """Return a string with the traceback."""
- lines = traceback.format_exception(self.exc_type, self.exc_value,
- self.frames[0], limit=limit)
- return ''.join(lines).rstrip()
-
- def render_as_html(self, full=False):
- """Return a unicode string with the traceback as rendered HTML."""
- from jinja2.debugrenderer import render_traceback
- return u'%s\n\n<!--\n%s\n-->' % (
- render_traceback(self, full=full),
- self.render_as_text().decode('utf-8', 'replace')
+ # Outside of runtime, so the frame isn't executing template
+ # code, but it still needs to point at the template.
+ tb = fake_traceback(
+ exc_value, None, exc_value.filename or "<unknown>", exc_value.lineno
)
-
- @property
- def is_template_syntax_error(self):
- """`True` if this is a template syntax error."""
- return isinstance(self.exc_value, TemplateSyntaxError)
-
- @property
- def exc_info(self):
- """Exception info tuple with a proxy around the frame objects."""
- return self.exc_type, self.exc_value, self.frames[0]
-
- @property
- def standard_exc_info(self):
- """Standard python exc_info for re-raising"""
- tb = self.frames[0]
- # the frame will be an actual traceback (or transparent proxy) if
- # we are on pypy or a python implementation with support for tproxy
- if type(tb) is not TracebackType:
- tb = tb.tb
- return self.exc_type, self.exc_value, tb
-
-
-def make_traceback(exc_info, source_hint=None):
- """Creates a processed traceback object from the exc_info."""
- exc_type, exc_value, tb = exc_info
- if isinstance(exc_value, TemplateSyntaxError):
- exc_info = translate_syntax_error(exc_value, source_hint)
- initial_skip = 0
else:
- initial_skip = 1
- return translate_exception(exc_info, initial_skip)
-
-
-def translate_syntax_error(error, source=None):
- """Rewrites a syntax error to please traceback systems."""
- error.source = source
- error.translated = True
- exc_info = (error.__class__, error, None)
- filename = error.filename
- if filename is None:
- filename = '<unknown>'
- return fake_exc_info(exc_info, filename, error.lineno)
+ # Skip the frame for the render function.
+ tb = tb.tb_next
+ stack = []
-def translate_exception(exc_info, initial_skip=0):
- """If passed an exc_info it will automatically rewrite the exceptions
- all the way down to the correct line numbers and frames.
- """
- tb = exc_info[2]
- frames = []
-
- # skip some internal frames if wanted
- for x in range(initial_skip):
- if tb is not None:
- tb = tb.tb_next
- initial_tb = tb
-
+ # Build the stack of traceback object, replacing any in template
+ # code with the source file and line information.
while tb is not None:
- # skip frames decorated with @internalcode. These are internal
- # calls we can't avoid and that are useless in template debugging
- # output.
+ # Skip frames decorated with @internalcode. These are internal
+ # calls that aren't useful in template debugging output.
if tb.tb_frame.f_code in internal_code:
tb = tb.tb_next
continue
- # save a reference to the next frame if we override the current
- # one with a faked one.
- next = tb.tb_next
+ template = tb.tb_frame.f_globals.get("__jinja_template__")
- # fake template exceptions
- template = tb.tb_frame.f_globals.get('__jinja_template__')
if template is not None:
lineno = template.get_corresponding_lineno(tb.tb_lineno)
- tb = fake_exc_info(exc_info[:2] + (tb,), template.filename,
- lineno)[2]
+ fake_tb = fake_traceback(exc_value, tb, template.filename, lineno)
+ stack.append(fake_tb)
+ else:
+ stack.append(tb)
- frames.append(make_frame_proxy(tb))
- tb = next
+ tb = tb.tb_next
- # if we don't have any exceptions in the frames left, we have to
- # reraise it unchanged.
- # XXX: can we backup here? when could this happen?
- if not frames:
- reraise(exc_info[0], exc_info[1], exc_info[2])
+ tb_next = None
- return ProcessedTraceback(exc_info[0], exc_info[1], frames)
+ # Assign tb_next in reverse to avoid circular references.
+ for tb in reversed(stack):
+ tb_next = tb_set_next(tb, tb_next)
+ return exc_type, exc_value, tb_next
-def get_jinja_locals(real_locals):
- ctx = real_locals.get('context')
- if ctx:
- locals = ctx.get_all().copy()
+
+def fake_traceback(exc_value, tb, filename, lineno):
+ """Produce a new traceback object that looks like it came from the
+ template source instead of the compiled code. The filename, line
+ number, and location name will point to the template, and the local
+ variables will be the current template context.
+
+ :param exc_value: The original exception to be re-raised to create
+ the new traceback.
+ :param tb: The original traceback to get the local variables and
+ code info from.
+ :param filename: The template filename.
+ :param lineno: The line number in the template source.
+ """
+ if tb is not None:
+ # Replace the real locals with the context that would be
+ # available at that point in the template.
+ locals = get_template_locals(tb.tb_frame.f_locals)
+ locals.pop("__jinja_exception__", None)
else:
locals = {}
+ globals = {
+ "__name__": filename,
+ "__file__": filename,
+ "__jinja_exception__": exc_value,
+ }
+ # Raise an exception at the correct line number.
+ code = compile("\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec")
+
+ # Build a new code object that points to the template file and
+ # replaces the location with a block name.
+ try:
+ location = "template"
+
+ if tb is not None:
+ function = tb.tb_frame.f_code.co_name
+
+ if function == "root":
+ location = "top-level template code"
+ elif function.startswith("block_"):
+ location = 'block "%s"' % function[6:]
+
+ # Collect arguments for the new code object. CodeType only
+ # accepts positional arguments, and arguments were inserted in
+ # new Python versions.
+ code_args = []
+
+ for attr in (
+ "argcount",
+ "posonlyargcount", # Python 3.8
+ "kwonlyargcount", # Python 3
+ "nlocals",
+ "stacksize",
+ "flags",
+ "code", # codestring
+ "consts", # constants
+ "names",
+ "varnames",
+ ("filename", filename),
+ ("name", location),
+ "firstlineno",
+ "lnotab",
+ "freevars",
+ "cellvars",
+ ):
+ if isinstance(attr, tuple):
+ # Replace with given value.
+ code_args.append(attr[1])
+ continue
+
+ try:
+ # Copy original value if it exists.
+ code_args.append(getattr(code, "co_" + attr))
+ except AttributeError:
+ # Some arguments were added later.
+ continue
+
+ code = CodeType(*code_args)
+ except Exception:
+ # Some environments such as Google App Engine don't support
+ # modifying code objects.
+ pass
+
+ # Execute the new code, which is guaranteed to raise, and return
+ # the new traceback without this frame.
+ try:
+ exec(code, globals, locals)
+ except BaseException:
+ return sys.exc_info()[2].tb_next
+
+
+def get_template_locals(real_locals):
+ """Based on the runtime locals, get the context that would be
+ available at that point in the template.
+ """
+ # Start with the current template context.
+ ctx = real_locals.get("context")
+
+ if ctx:
+ data = ctx.get_all().copy()
+ else:
+ data = {}
+
+ # Might be in a derived context that only sets local variables
+ # rather than pushing a context. Local variables follow the scheme
+ # l_depth_name. Find the highest-depth local that has a value for
+ # each name.
local_overrides = {}
- for name, value in iteritems(real_locals):
- if not name.startswith('l_') or value is missing:
+ for name, value in real_locals.items():
+ if not name.startswith("l_") or value is missing:
+ # Not a template variable, or no longer relevant.
continue
+
try:
- _, depth, name = name.split('_', 2)
+ _, depth, name = name.split("_", 2)
depth = int(depth)
except ValueError:
continue
+
cur_depth = local_overrides.get(name, (-1,))[0]
+
if cur_depth < depth:
local_overrides[name] = (depth, value)
- for name, (_, value) in iteritems(local_overrides):
+ # Modify the context with any derived context.
+ for name, (_, value) in local_overrides.items():
if value is missing:
- locals.pop(name, None)
+ data.pop(name, None)
else:
- locals[name] = value
+ data[name] = value
- return locals
+ return data
-def fake_exc_info(exc_info, filename, lineno):
- """Helper for `translate_exception`."""
- exc_type, exc_value, tb = exc_info
+if sys.version_info >= (3, 7):
+ # tb_next is directly assignable as of Python 3.7
+ def tb_set_next(tb, tb_next):
+ tb.tb_next = tb_next
+ return tb
- # figure the real context out
- if tb is not None:
- locals = get_jinja_locals(tb.tb_frame.f_locals)
- # if there is a local called __jinja_exception__, we get
- # rid of it to not break the debug functionality.
- locals.pop('__jinja_exception__', None)
- else:
- locals = {}
-
- # assamble fake globals we need
- globals = {
- '__name__': filename,
- '__file__': filename,
- '__jinja_exception__': exc_info[:2],
-
- # we don't want to keep the reference to the template around
- # to not cause circular dependencies, but we mark it as Jinja
- # frame for the ProcessedTraceback
- '__jinja_template__': None
- }
-
- # and fake the exception
- code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec')
-
- # if it's possible, change the name of the code. This won't work
- # on some python environments such as google appengine
+elif PYPY:
+ # PyPy might have special support, and won't work with ctypes.
try:
- if tb is None:
- location = 'template'
- else:
- function = tb.tb_frame.f_code.co_name
- if function == 'root':
- location = 'top-level template code'
- elif function.startswith('block_'):
- location = 'block "%s"' % function[6:]
- else:
- location = 'template'
-
- if PY2:
- code = CodeType(0, code.co_nlocals, code.co_stacksize,
- code.co_flags, code.co_code, code.co_consts,
- code.co_names, code.co_varnames, filename,
- location, code.co_firstlineno,
- code.co_lnotab, (), ())
- else:
- code = CodeType(0, code.co_kwonlyargcount,
- code.co_nlocals, code.co_stacksize,
- code.co_flags, code.co_code, code.co_consts,
- code.co_names, code.co_varnames, filename,
- location, code.co_firstlineno,
- code.co_lnotab, (), ())
- except Exception as e:
- pass
+ import tputil
+ except ImportError:
+ # Without tproxy support, use the original traceback.
+ def tb_set_next(tb, tb_next):
+ return tb
- # execute the code and catch the new traceback
- try:
- exec(code, globals, locals)
- except:
- exc_info = sys.exc_info()
- new_tb = exc_info[2].tb_next
+ else:
+ # With tproxy support, create a proxy around the traceback that
+ # returns the new tb_next.
+ def tb_set_next(tb, tb_next):
+ def controller(op):
+ if op.opname == "__getattribute__" and op.args[0] == "tb_next":
+ return tb_next
- # return without this frame
- return exc_info[:2] + (new_tb,)
+ return op.delegate()
+ return tputil.make_proxy(controller, obj=tb)
-def _init_ugly_crap():
- """This function implements a few ugly things so that we can patch the
- traceback objects. The function returned allows resetting `tb_next` on
- any python traceback object. Do not attempt to use this on non cpython
- interpreters
- """
- import ctypes
- from types import TracebackType
- if PY2:
- # figure out size of _Py_ssize_t for Python 2:
- if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'):
- _Py_ssize_t = ctypes.c_int64
- else:
- _Py_ssize_t = ctypes.c_int
- else:
- # platform ssize_t on Python 3
- _Py_ssize_t = ctypes.c_ssize_t
+else:
+ # Use ctypes to assign tb_next at the C level since it's read-only
+ # from Python.
+ import ctypes
- # regular python
- class _PyObject(ctypes.Structure):
- pass
- _PyObject._fields_ = [
- ('ob_refcnt', _Py_ssize_t),
- ('ob_type', ctypes.POINTER(_PyObject))
- ]
-
- # python with trace
- if hasattr(sys, 'getobjects'):
- class _PyObject(ctypes.Structure):
- pass
- _PyObject._fields_ = [
- ('_ob_next', ctypes.POINTER(_PyObject)),
- ('_ob_prev', ctypes.POINTER(_PyObject)),
- ('ob_refcnt', _Py_ssize_t),
- ('ob_type', ctypes.POINTER(_PyObject))
+ class _CTraceback(ctypes.Structure):
+ _fields_ = [
+ # Extra PyObject slots when compiled with Py_TRACE_REFS.
+ ("PyObject_HEAD", ctypes.c_byte * object().__sizeof__()),
+ # Only care about tb_next as an object, not a traceback.
+ ("tb_next", ctypes.py_object),
]
- class _Traceback(_PyObject):
- pass
- _Traceback._fields_ = [
- ('tb_next', ctypes.POINTER(_Traceback)),
- ('tb_frame', ctypes.POINTER(_PyObject)),
- ('tb_lasti', ctypes.c_int),
- ('tb_lineno', ctypes.c_int)
- ]
-
- def tb_set_next(tb, next):
- """Set the tb_next attribute of a traceback object."""
- if not (isinstance(tb, TracebackType) and
- (next is None or isinstance(next, TracebackType))):
- raise TypeError('tb_set_next arguments must be traceback objects')
- obj = _Traceback.from_address(id(tb))
- if tb.tb_next is not None:
- old = _Traceback.from_address(id(tb.tb_next))
- old.ob_refcnt -= 1
- if next is None:
- obj.tb_next = ctypes.POINTER(_Traceback)()
- else:
- next = _Traceback.from_address(id(next))
- next.ob_refcnt += 1
- obj.tb_next = ctypes.pointer(next)
+ def tb_set_next(tb, tb_next):
+ c_tb = _CTraceback.from_address(id(tb))
- return tb_set_next
+ # Clear out the old tb_next.
+ if tb.tb_next is not None:
+ c_tb_next = ctypes.py_object(tb.tb_next)
+ c_tb.tb_next = ctypes.py_object()
+ ctypes.pythonapi.Py_DecRef(c_tb_next)
+ # Assign the new tb_next.
+ if tb_next is not None:
+ c_tb_next = ctypes.py_object(tb_next)
+ ctypes.pythonapi.Py_IncRef(c_tb_next)
+ c_tb.tb_next = c_tb_next
-# try to get a tb_set_next implementation if we don't have transparent
-# proxies.
-tb_set_next = None
-if tproxy is None:
- try:
- tb_set_next = _init_ugly_crap()
- except:
- pass
- del _init_ugly_crap
+ return tb
diff --git a/lib/spack/external/jinja2/defaults.py b/lib/spack/external/jinja2/defaults.py
index 7c93dec0ae..8e0e7d7710 100644
--- a/lib/spack/external/jinja2/defaults.py
+++ b/lib/spack/external/jinja2/defaults.py
@@ -1,56 +1,44 @@
# -*- coding: utf-8 -*-
-"""
- jinja2.defaults
- ~~~~~~~~~~~~~~~
-
- Jinja default filters and tags.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
-"""
-from jinja2._compat import range_type
-from jinja2.utils import generate_lorem_ipsum, Cycler, Joiner, Namespace
-
+from ._compat import range_type
+from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401
+from .tests import TESTS as DEFAULT_TESTS # noqa: F401
+from .utils import Cycler
+from .utils import generate_lorem_ipsum
+from .utils import Joiner
+from .utils import Namespace
# defaults for the parser / lexer
-BLOCK_START_STRING = '{%'
-BLOCK_END_STRING = '%}'
-VARIABLE_START_STRING = '{{'
-VARIABLE_END_STRING = '}}'
-COMMENT_START_STRING = '{#'
-COMMENT_END_STRING = '#}'
+BLOCK_START_STRING = "{%"
+BLOCK_END_STRING = "%}"
+VARIABLE_START_STRING = "{{"
+VARIABLE_END_STRING = "}}"
+COMMENT_START_STRING = "{#"
+COMMENT_END_STRING = "#}"
LINE_STATEMENT_PREFIX = None
LINE_COMMENT_PREFIX = None
TRIM_BLOCKS = False
LSTRIP_BLOCKS = False
-NEWLINE_SEQUENCE = '\n'
+NEWLINE_SEQUENCE = "\n"
KEEP_TRAILING_NEWLINE = False
-
# default filters, tests and namespace
-from jinja2.filters import FILTERS as DEFAULT_FILTERS
-from jinja2.tests import TESTS as DEFAULT_TESTS
+
DEFAULT_NAMESPACE = {
- 'range': range_type,
- 'dict': dict,
- 'lipsum': generate_lorem_ipsum,
- 'cycler': Cycler,
- 'joiner': Joiner,
- 'namespace': Namespace
+ "range": range_type,
+ "dict": dict,
+ "lipsum": generate_lorem_ipsum,
+ "cycler": Cycler,
+ "joiner": Joiner,
+ "namespace": Namespace,
}
-
# default policies
DEFAULT_POLICIES = {
- 'compiler.ascii_str': True,
- 'urlize.rel': 'noopener',
- 'urlize.target': None,
- 'truncate.leeway': 5,
- 'json.dumps_function': None,
- 'json.dumps_kwargs': {'sort_keys': True},
- 'ext.i18n.trimmed': False,
+ "compiler.ascii_str": True,
+ "urlize.rel": "noopener",
+ "urlize.target": None,
+ "truncate.leeway": 5,
+ "json.dumps_function": None,
+ "json.dumps_kwargs": {"sort_keys": True},
+ "ext.i18n.trimmed": False,
}
-
-
-# export all constants
-__all__ = tuple(x for x in locals().keys() if x.isupper())
diff --git a/lib/spack/external/jinja2/environment.py b/lib/spack/external/jinja2/environment.py
index 549d9afab4..8430390eea 100644
--- a/lib/spack/external/jinja2/environment.py
+++ b/lib/spack/external/jinja2/environment.py
@@ -1,60 +1,83 @@
# -*- coding: utf-8 -*-
-"""
- jinja2.environment
- ~~~~~~~~~~~~~~~~~~
-
- Provides a class that holds runtime and parsing time options.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
+"""Classes for managing templates and their runtime and compile time
+options.
"""
import os
import sys
import weakref
-from functools import reduce, partial
-from jinja2 import nodes
-from jinja2.defaults import BLOCK_START_STRING, \
- BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \
- COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \
- LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \
- DEFAULT_FILTERS, DEFAULT_TESTS, DEFAULT_NAMESPACE, \
- DEFAULT_POLICIES, KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS
-from jinja2.lexer import get_lexer, TokenStream
-from jinja2.parser import Parser
-from jinja2.nodes import EvalContext
-from jinja2.compiler import generate, CodeGenerator
-from jinja2.runtime import Undefined, new_context, Context
-from jinja2.exceptions import TemplateSyntaxError, TemplateNotFound, \
- TemplatesNotFound, TemplateRuntimeError
-from jinja2.utils import import_string, LRUCache, Markup, missing, \
- concat, consume, internalcode, have_async_gen
-from jinja2._compat import imap, ifilter, string_types, iteritems, \
- text_type, reraise, implements_iterator, implements_to_string, \
- encode_filename, PY2, PYPY
-
+from functools import partial
+from functools import reduce
+
+from markupsafe import Markup
+
+from . import nodes
+from ._compat import encode_filename
+from ._compat import implements_iterator
+from ._compat import implements_to_string
+from ._compat import iteritems
+from ._compat import PY2
+from ._compat import PYPY
+from ._compat import reraise
+from ._compat import string_types
+from ._compat import text_type
+from .compiler import CodeGenerator
+from .compiler import generate
+from .defaults import BLOCK_END_STRING
+from .defaults import BLOCK_START_STRING
+from .defaults import COMMENT_END_STRING
+from .defaults import COMMENT_START_STRING
+from .defaults import DEFAULT_FILTERS
+from .defaults import DEFAULT_NAMESPACE
+from .defaults import DEFAULT_POLICIES
+from .defaults import DEFAULT_TESTS
+from .defaults import KEEP_TRAILING_NEWLINE
+from .defaults import LINE_COMMENT_PREFIX
+from .defaults import LINE_STATEMENT_PREFIX
+from .defaults import LSTRIP_BLOCKS
+from .defaults import NEWLINE_SEQUENCE
+from .defaults import TRIM_BLOCKS
+from .defaults import VARIABLE_END_STRING
+from .defaults import VARIABLE_START_STRING
+from .exceptions import TemplateNotFound
+from .exceptions import TemplateRuntimeError
+from .exceptions import TemplatesNotFound
+from .exceptions import TemplateSyntaxError
+from .exceptions import UndefinedError
+from .lexer import get_lexer
+from .lexer import TokenStream
+from .nodes import EvalContext
+from .parser import Parser
+from .runtime import Context
+from .runtime import new_context
+from .runtime import Undefined
+from .utils import concat
+from .utils import consume
+from .utils import have_async_gen
+from .utils import import_string
+from .utils import internalcode
+from .utils import LRUCache
+from .utils import missing
# for direct template usage we have up to ten living environments
_spontaneous_environments = LRUCache(10)
-# the function to create jinja traceback objects. This is dynamically
-# imported on the first exception in the exception handler.
-_make_traceback = None
+def get_spontaneous_environment(cls, *args):
+ """Return a new spontaneous environment. A spontaneous environment
+ is used for templates created directly rather than through an
+ existing environment.
-def get_spontaneous_environment(*args):
- """Return a new spontaneous environment. A spontaneous environment is an
- unnamed and unaccessible (in theory) environment that is used for
- templates generated from a string and not from the file system.
+ :param cls: Environment class to create.
+ :param args: Positional arguments passed to environment.
"""
+ key = (cls, args)
+
try:
- env = _spontaneous_environments.get(args)
- except TypeError:
- return Environment(*args)
- if env is not None:
+ return _spontaneous_environments[key]
+ except KeyError:
+ _spontaneous_environments[key] = env = cls(*args)
+ env.shared = True
return env
- _spontaneous_environments[args] = env = Environment(*args)
- env.shared = True
- return env
def create_cache(size):
@@ -93,20 +116,25 @@ def fail_for_missing_callable(string, name):
try:
name._fail_with_undefined_error()
except Exception as e:
- msg = '%s (%s; did you forget to quote the callable name?)' % (msg, e)
+ msg = "%s (%s; did you forget to quote the callable name?)" % (msg, e)
raise TemplateRuntimeError(msg)
def _environment_sanity_check(environment):
"""Perform a sanity check on the environment."""
- assert issubclass(environment.undefined, Undefined), 'undefined must ' \
- 'be a subclass of undefined because filters depend on it.'
- assert environment.block_start_string != \
- environment.variable_start_string != \
- environment.comment_start_string, 'block, variable and comment ' \
- 'start strings must be different'
- assert environment.newline_sequence in ('\r', '\r\n', '\n'), \
- 'newline_sequence set to unknown line ending string.'
+ assert issubclass(
+ environment.undefined, Undefined
+ ), "undefined must be a subclass of undefined because filters depend on it."
+ assert (
+ environment.block_start_string
+ != environment.variable_start_string
+ != environment.comment_start_string
+ ), "block, variable and comment start strings must be different"
+ assert environment.newline_sequence in (
+ "\r",
+ "\r\n",
+ "\n",
+ ), "newline_sequence set to unknown line ending string."
return environment
@@ -191,7 +219,7 @@ class Environment(object):
`autoescape`
If set to ``True`` the XML/HTML autoescaping feature is enabled by
default. For more details about autoescaping see
- :class:`~jinja2.utils.Markup`. As of Jinja 2.4 this can also
+ :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also
be a callable that is passed the template name and has to
return ``True`` or ``False`` depending on autoescape should be
enabled by default.
@@ -249,10 +277,6 @@ class Environment(object):
#: must not be modified
shared = False
- #: these are currently EXPERIMENTAL undocumented features.
- exception_handler = None
- exception_formatter = None
-
#: the class that is used for code generation. See
#: :class:`~jinja2.compiler.CodeGenerator` for more information.
code_generator_class = CodeGenerator
@@ -261,29 +285,31 @@ class Environment(object):
#: :class:`~jinja2.runtime.Context` for more information.
context_class = Context
- def __init__(self,
- block_start_string=BLOCK_START_STRING,
- block_end_string=BLOCK_END_STRING,
- variable_start_string=VARIABLE_START_STRING,
- variable_end_string=VARIABLE_END_STRING,
- comment_start_string=COMMENT_START_STRING,
- comment_end_string=COMMENT_END_STRING,
- line_statement_prefix=LINE_STATEMENT_PREFIX,
- line_comment_prefix=LINE_COMMENT_PREFIX,
- trim_blocks=TRIM_BLOCKS,
- lstrip_blocks=LSTRIP_BLOCKS,
- newline_sequence=NEWLINE_SEQUENCE,
- keep_trailing_newline=KEEP_TRAILING_NEWLINE,
- extensions=(),
- optimized=True,
- undefined=Undefined,
- finalize=None,
- autoescape=False,
- loader=None,
- cache_size=400,
- auto_reload=True,
- bytecode_cache=None,
- enable_async=False):
+ def __init__(
+ self,
+ block_start_string=BLOCK_START_STRING,
+ block_end_string=BLOCK_END_STRING,
+ variable_start_string=VARIABLE_START_STRING,
+ variable_end_string=VARIABLE_END_STRING,
+ comment_start_string=COMMENT_START_STRING,
+ comment_end_string=COMMENT_END_STRING,
+ line_statement_prefix=LINE_STATEMENT_PREFIX,
+ line_comment_prefix=LINE_COMMENT_PREFIX,
+ trim_blocks=TRIM_BLOCKS,
+ lstrip_blocks=LSTRIP_BLOCKS,
+ newline_sequence=NEWLINE_SEQUENCE,
+ keep_trailing_newline=KEEP_TRAILING_NEWLINE,
+ extensions=(),
+ optimized=True,
+ undefined=Undefined,
+ finalize=None,
+ autoescape=False,
+ loader=None,
+ cache_size=400,
+ auto_reload=True,
+ bytecode_cache=None,
+ enable_async=False,
+ ):
# !!Important notice!!
# The constructor accepts quite a few arguments that should be
# passed by keyword rather than position. However it's important to
@@ -334,6 +360,9 @@ class Environment(object):
self.enable_async = enable_async
self.is_async = self.enable_async and have_async_gen
+ if self.is_async:
+ # runs patch_all() to enable async support
+ from . import asyncsupport # noqa: F401
_environment_sanity_check(self)
@@ -353,15 +382,28 @@ class Environment(object):
if not hasattr(self, key):
setattr(self, key, value)
- def overlay(self, block_start_string=missing, block_end_string=missing,
- variable_start_string=missing, variable_end_string=missing,
- comment_start_string=missing, comment_end_string=missing,
- line_statement_prefix=missing, line_comment_prefix=missing,
- trim_blocks=missing, lstrip_blocks=missing,
- extensions=missing, optimized=missing,
- undefined=missing, finalize=missing, autoescape=missing,
- loader=missing, cache_size=missing, auto_reload=missing,
- bytecode_cache=missing):
+ def overlay(
+ self,
+ block_start_string=missing,
+ block_end_string=missing,
+ variable_start_string=missing,
+ variable_end_string=missing,
+ comment_start_string=missing,
+ comment_end_string=missing,
+ line_statement_prefix=missing,
+ line_comment_prefix=missing,
+ trim_blocks=missing,
+ lstrip_blocks=missing,
+ extensions=missing,
+ optimized=missing,
+ undefined=missing,
+ finalize=missing,
+ autoescape=missing,
+ loader=missing,
+ cache_size=missing,
+ auto_reload=missing,
+ bytecode_cache=missing,
+ ):
"""Create a new overlay environment that shares all the data with the
current environment except for cache and the overridden attributes.
Extensions cannot be removed for an overlayed environment. An overlayed
@@ -374,7 +416,7 @@ class Environment(object):
through.
"""
args = dict(locals())
- del args['self'], args['cache_size'], args['extensions']
+ del args["self"], args["cache_size"], args["extensions"]
rv = object.__new__(self.__class__)
rv.__dict__.update(self.__dict__)
@@ -402,8 +444,7 @@ class Environment(object):
def iter_extensions(self):
"""Iterates over the extensions by priority."""
- return iter(sorted(self.extensions.values(),
- key=lambda x: x.priority))
+ return iter(sorted(self.extensions.values(), key=lambda x: x.priority))
def getitem(self, obj, argument):
"""Get an item or attribute of an object but prefer the item."""
@@ -435,8 +476,9 @@ class Environment(object):
except (TypeError, LookupError, AttributeError):
return self.undefined(obj=obj, name=attribute)
- def call_filter(self, name, value, args=None, kwargs=None,
- context=None, eval_ctx=None):
+ def call_filter(
+ self, name, value, args=None, kwargs=None, context=None, eval_ctx=None
+ ):
"""Invokes a filter on a value the same way the compiler does it.
Note that on Python 3 this might return a coroutine in case the
@@ -448,21 +490,22 @@ class Environment(object):
"""
func = self.filters.get(name)
if func is None:
- fail_for_missing_callable('no filter named %r', name)
+ fail_for_missing_callable("no filter named %r", name)
args = [value] + list(args or ())
- if getattr(func, 'contextfilter', False):
+ if getattr(func, "contextfilter", False) is True:
if context is None:
- raise TemplateRuntimeError('Attempted to invoke context '
- 'filter without context')
+ raise TemplateRuntimeError(
+ "Attempted to invoke context filter without context"
+ )
args.insert(0, context)
- elif getattr(func, 'evalcontextfilter', False):
+ elif getattr(func, "evalcontextfilter", False) is True:
if eval_ctx is None:
if context is not None:
eval_ctx = context.eval_ctx
else:
eval_ctx = EvalContext(self)
args.insert(0, eval_ctx)
- elif getattr(func, 'environmentfilter', False):
+ elif getattr(func, "environmentfilter", False) is True:
args.insert(0, self)
return func(*args, **(kwargs or {}))
@@ -473,7 +516,7 @@ class Environment(object):
"""
func = self.tests.get(name)
if func is None:
- fail_for_missing_callable('no test named %r', name)
+ fail_for_missing_callable("no test named %r", name)
return func(value, *(args or ()), **(kwargs or {}))
@internalcode
@@ -483,14 +526,13 @@ class Environment(object):
executable source- or bytecode. This is useful for debugging or to
extract information from templates.
- If you are :ref:`developing Jinja2 extensions <writing-extensions>`
+ If you are :ref:`developing Jinja extensions <writing-extensions>`
this gives you a good overview of the node tree generated.
"""
try:
return self._parse(source, name, filename)
except TemplateSyntaxError:
- exc_info = sys.exc_info()
- self.handle_exception(exc_info, source_hint=source)
+ self.handle_exception(source=source)
def _parse(self, source, name, filename):
"""Internal parsing function used by `parse` and `compile`."""
@@ -510,16 +552,18 @@ class Environment(object):
try:
return self.lexer.tokeniter(source, name, filename)
except TemplateSyntaxError:
- exc_info = sys.exc_info()
- self.handle_exception(exc_info, source_hint=source)
+ self.handle_exception(source=source)
def preprocess(self, source, name=None, filename=None):
"""Preprocesses the source with all extensions. This is automatically
called for all parsing and compiling methods but *not* for :meth:`lex`
because there you usually only want the actual source tokenized.
"""
- return reduce(lambda s, e: e.preprocess(s, name, filename),
- self.iter_extensions(), text_type(source))
+ return reduce(
+ lambda s, e: e.preprocess(s, name, filename),
+ self.iter_extensions(),
+ text_type(source),
+ )
def _tokenize(self, source, name, filename=None, state=None):
"""Called by the parser to do the preprocessing and filtering
@@ -539,8 +583,14 @@ class Environment(object):
.. versionadded:: 2.5
"""
- return generate(source, self, name, filename, defer_init=defer_init,
- optimized=self.optimized)
+ return generate(
+ source,
+ self,
+ name,
+ filename,
+ defer_init=defer_init,
+ optimized=self.optimized,
+ )
def _compile(self, source, filename):
"""Internal hook that can be overridden to hook a different compile
@@ -548,11 +598,10 @@ class Environment(object):
.. versionadded:: 2.5
"""
- return compile(source, filename, 'exec')
+ return compile(source, filename, "exec")
@internalcode
- def compile(self, source, name=None, filename=None, raw=False,
- defer_init=False):
+ def compile(self, source, name=None, filename=None, raw=False, defer_init=False):
"""Compile a node or template source code. The `name` parameter is
the load name of the template after it was joined using
:meth:`join_path` if necessary, not the filename on the file system.
@@ -577,18 +626,16 @@ class Environment(object):
if isinstance(source, string_types):
source_hint = source
source = self._parse(source, name, filename)
- source = self._generate(source, name, filename,
- defer_init=defer_init)
+ source = self._generate(source, name, filename, defer_init=defer_init)
if raw:
return source
if filename is None:
- filename = '<template>'
+ filename = "<template>"
else:
filename = encode_filename(filename)
return self._compile(source, filename)
except TemplateSyntaxError:
- exc_info = sys.exc_info()
- self.handle_exception(exc_info, source_hint=source_hint)
+ self.handle_exception(source=source_hint)
def compile_expression(self, source, undefined_to_none=True):
"""A handy helper method that returns a callable that accepts keyword
@@ -618,26 +665,32 @@ class Environment(object):
.. versionadded:: 2.1
"""
- parser = Parser(self, source, state='variable')
- exc_info = None
+ parser = Parser(self, source, state="variable")
try:
expr = parser.parse_expression()
if not parser.stream.eos:
- raise TemplateSyntaxError('chunk after expression',
- parser.stream.current.lineno,
- None, None)
+ raise TemplateSyntaxError(
+ "chunk after expression", parser.stream.current.lineno, None, None
+ )
expr.set_environment(self)
except TemplateSyntaxError:
- exc_info = sys.exc_info()
- if exc_info is not None:
- self.handle_exception(exc_info, source_hint=source)
- body = [nodes.Assign(nodes.Name('result', 'store'), expr, lineno=1)]
+ if sys.exc_info() is not None:
+ self.handle_exception(source=source)
+
+ body = [nodes.Assign(nodes.Name("result", "store"), expr, lineno=1)]
template = self.from_string(nodes.Template(body, lineno=1))
return TemplateExpression(template, undefined_to_none)
- def compile_templates(self, target, extensions=None, filter_func=None,
- zip='deflated', log_function=None,
- ignore_errors=True, py_compile=False):
+ def compile_templates(
+ self,
+ target,
+ extensions=None,
+ filter_func=None,
+ zip="deflated",
+ log_function=None,
+ ignore_errors=True,
+ py_compile=False,
+ ):
"""Finds all the templates the loader can find, compiles them
and stores them in `target`. If `zip` is `None`, instead of in a
zipfile, the templates will be stored in a directory.
@@ -660,42 +713,52 @@ class Environment(object):
.. versionadded:: 2.4
"""
- from jinja2.loaders import ModuleLoader
+ from .loaders import ModuleLoader
if log_function is None:
- log_function = lambda x: None
+
+ def log_function(x):
+ pass
if py_compile:
if not PY2 or PYPY:
- from warnings import warn
- warn(Warning('py_compile has no effect on pypy or Python 3'))
+ import warnings
+
+ warnings.warn(
+ "'py_compile=True' has no effect on PyPy or Python"
+ " 3 and will be removed in version 3.0",
+ DeprecationWarning,
+ stacklevel=2,
+ )
py_compile = False
else:
import imp
import marshal
- py_header = imp.get_magic() + \
- u'\xff\xff\xff\xff'.encode('iso-8859-15')
+
+ py_header = imp.get_magic() + u"\xff\xff\xff\xff".encode("iso-8859-15")
# Python 3.3 added a source filesize to the header
if sys.version_info >= (3, 3):
- py_header += u'\x00\x00\x00\x00'.encode('iso-8859-15')
+ py_header += u"\x00\x00\x00\x00".encode("iso-8859-15")
- def write_file(filename, data, mode):
+ def write_file(filename, data):
if zip:
info = ZipInfo(filename)
info.external_attr = 0o755 << 16
zip_file.writestr(info, data)
else:
- f = open(os.path.join(target, filename), mode)
- try:
+ if isinstance(data, text_type):
+ data = data.encode("utf8")
+
+ with open(os.path.join(target, filename), "wb") as f:
f.write(data)
- finally:
- f.close()
if zip is not None:
from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED
- zip_file = ZipFile(target, 'w', dict(deflated=ZIP_DEFLATED,
- stored=ZIP_STORED)[zip])
+
+ zip_file = ZipFile(
+ target, "w", dict(deflated=ZIP_DEFLATED, stored=ZIP_STORED)[zip]
+ )
log_function('Compiling into Zip archive "%s"' % target)
else:
if not os.path.isdir(target):
@@ -717,18 +780,16 @@ class Environment(object):
if py_compile:
c = self._compile(code, encode_filename(filename))
- write_file(filename + 'c', py_header +
- marshal.dumps(c), 'wb')
- log_function('Byte-compiled "%s" as %s' %
- (name, filename + 'c'))
+ write_file(filename + "c", py_header + marshal.dumps(c))
+ log_function('Byte-compiled "%s" as %s' % (name, filename + "c"))
else:
- write_file(filename, code, 'w')
+ write_file(filename, code)
log_function('Compiled "%s" as %s' % (name, filename))
finally:
if zip:
zip_file.close()
- log_function('Finished compiling templates')
+ log_function("Finished compiling templates")
def list_templates(self, extensions=None, filter_func=None):
"""Returns a list of templates for this environment. This requires
@@ -746,38 +807,29 @@ class Environment(object):
.. versionadded:: 2.4
"""
- x = self.loader.list_templates()
+ names = self.loader.list_templates()
+
if extensions is not None:
if filter_func is not None:
- raise TypeError('either extensions or filter_func '
- 'can be passed, but not both')
- filter_func = lambda x: '.' in x and \
- x.rsplit('.', 1)[1] in extensions
+ raise TypeError(
+ "either extensions or filter_func can be passed, but not both"
+ )
+
+ def filter_func(x):
+ return "." in x and x.rsplit(".", 1)[1] in extensions
+
if filter_func is not None:
- x = list(ifilter(filter_func, x))
- return x
+ names = [name for name in names if filter_func(name)]
+
+ return names
- def handle_exception(self, exc_info=None, rendered=False, source_hint=None):
+ def handle_exception(self, source=None):
"""Exception handling helper. This is used internally to either raise
rewritten exceptions or return a rendered traceback for the template.
"""
- global _make_traceback
- if exc_info is None:
- exc_info = sys.exc_info()
-
- # the debugging module is imported when it's used for the first time.
- # we're doing a lot of stuff there and for applications that do not
- # get any exceptions in template rendering there is no need to load
- # all of that.
- if _make_traceback is None:
- from jinja2.debug import make_traceback as _make_traceback
- traceback = _make_traceback(exc_info, source_hint)
- if rendered and self.exception_formatter is not None:
- return self.exception_formatter(traceback)
- if self.exception_handler is not None:
- self.exception_handler(traceback)
- exc_type, exc_value, tb = traceback.standard_exc_info
- reraise(exc_type, exc_value, tb)
+ from .debug import rewrite_traceback_stack
+
+ reraise(*rewrite_traceback_stack(source=source))
def join_path(self, template, parent):
"""Join a template with the parent. By default all the lookups are
@@ -794,12 +846,13 @@ class Environment(object):
@internalcode
def _load_template(self, name, globals):
if self.loader is None:
- raise TypeError('no loader for this environment specified')
+ raise TypeError("no loader for this environment specified")
cache_key = (weakref.ref(self.loader), name)
if self.cache is not None:
template = self.cache.get(cache_key)
- if template is not None and (not self.auto_reload or
- template.is_up_to_date):
+ if template is not None and (
+ not self.auto_reload or template.is_up_to_date
+ ):
return template
template = self.loader.load(self, name, globals)
if self.cache is not None:
@@ -835,15 +888,24 @@ class Environment(object):
before it fails. If it cannot find any of the templates, it will
raise a :exc:`TemplatesNotFound` exception.
- .. versionadded:: 2.3
+ .. versionchanged:: 2.11
+ If names is :class:`Undefined`, an :exc:`UndefinedError` is
+ raised instead. If no templates were found and names
+ contains :class:`Undefined`, the message is more helpful.
.. versionchanged:: 2.4
If `names` contains a :class:`Template` object it is returned
from the function unchanged.
+
+ .. versionadded:: 2.3
"""
+ if isinstance(names, Undefined):
+ names._fail_with_undefined_error()
+
if not names:
- raise TemplatesNotFound(message=u'Tried to select from an empty list '
- u'of templates.')
+ raise TemplatesNotFound(
+ message=u"Tried to select from an empty list " u"of templates."
+ )
globals = self.make_globals(globals)
for name in names:
if isinstance(name, Template):
@@ -852,20 +914,19 @@ class Environment(object):
name = self.join_path(name, parent)
try:
return self._load_template(name, globals)
- except TemplateNotFound:
+ except (TemplateNotFound, UndefinedError):
pass
raise TemplatesNotFound(names)
@internalcode
- def get_or_select_template(self, template_name_or_list,
- parent=None, globals=None):
+ def get_or_select_template(self, template_name_or_list, parent=None, globals=None):
"""Does a typecheck and dispatches to :meth:`select_template`
if an iterable of template names is given, otherwise to
:meth:`get_template`.
.. versionadded:: 2.3
"""
- if isinstance(template_name_or_list, string_types):
+ if isinstance(template_name_or_list, (string_types, Undefined)):
return self.get_template(template_name_or_list, parent, globals)
elif isinstance(template_name_or_list, Template):
return template_name_or_list
@@ -916,32 +977,57 @@ class Template(object):
StopIteration
"""
- def __new__(cls, source,
- block_start_string=BLOCK_START_STRING,
- block_end_string=BLOCK_END_STRING,
- variable_start_string=VARIABLE_START_STRING,
- variable_end_string=VARIABLE_END_STRING,
- comment_start_string=COMMENT_START_STRING,
- comment_end_string=COMMENT_END_STRING,
- line_statement_prefix=LINE_STATEMENT_PREFIX,
- line_comment_prefix=LINE_COMMENT_PREFIX,
- trim_blocks=TRIM_BLOCKS,
- lstrip_blocks=LSTRIP_BLOCKS,
- newline_sequence=NEWLINE_SEQUENCE,
- keep_trailing_newline=KEEP_TRAILING_NEWLINE,
- extensions=(),
- optimized=True,
- undefined=Undefined,
- finalize=None,
- autoescape=False,
- enable_async=False):
+ #: Type of environment to create when creating a template directly
+ #: rather than through an existing environment.
+ environment_class = Environment
+
+ def __new__(
+ cls,
+ source,
+ block_start_string=BLOCK_START_STRING,
+ block_end_string=BLOCK_END_STRING,
+ variable_start_string=VARIABLE_START_STRING,
+ variable_end_string=VARIABLE_END_STRING,
+ comment_start_string=COMMENT_START_STRING,
+ comment_end_string=COMMENT_END_STRING,
+ line_statement_prefix=LINE_STATEMENT_PREFIX,
+ line_comment_prefix=LINE_COMMENT_PREFIX,
+ trim_blocks=TRIM_BLOCKS,
+ lstrip_blocks=LSTRIP_BLOCKS,
+ newline_sequence=NEWLINE_SEQUENCE,
+ keep_trailing_newline=KEEP_TRAILING_NEWLINE,
+ extensions=(),
+ optimized=True,
+ undefined=Undefined,
+ finalize=None,
+ autoescape=False,
+ enable_async=False,
+ ):
env = get_spontaneous_environment(
- block_start_string, block_end_string, variable_start_string,
- variable_end_string, comment_start_string, comment_end_string,
- line_statement_prefix, line_comment_prefix, trim_blocks,
- lstrip_blocks, newline_sequence, keep_trailing_newline,
- frozenset(extensions), optimized, undefined, finalize, autoescape,
- None, 0, False, None, enable_async)
+ cls.environment_class,
+ block_start_string,
+ block_end_string,
+ variable_start_string,
+ variable_end_string,
+ comment_start_string,
+ comment_end_string,
+ line_statement_prefix,
+ line_comment_prefix,
+ trim_blocks,
+ lstrip_blocks,
+ newline_sequence,
+ keep_trailing_newline,
+ frozenset(extensions),
+ optimized,
+ undefined,
+ finalize,
+ autoescape,
+ None,
+ 0,
+ False,
+ None,
+ enable_async,
+ )
return env.from_string(source, template_class=cls)
@classmethod
@@ -949,10 +1035,7 @@ class Template(object):
"""Creates a template object from compiled code and the globals. This
is used by the loaders and environment to create a template object.
"""
- namespace = {
- 'environment': environment,
- '__file__': code.co_filename
- }
+ namespace = {"environment": environment, "__file__": code.co_filename}
exec(code, namespace)
rv = cls._from_namespace(environment, namespace, globals)
rv._uptodate = uptodate
@@ -972,21 +1055,21 @@ class Template(object):
t = object.__new__(cls)
t.environment = environment
t.globals = globals
- t.name = namespace['name']
- t.filename = namespace['__file__']
- t.blocks = namespace['blocks']
+ t.name = namespace["name"]
+ t.filename = namespace["__file__"]
+ t.blocks = namespace["blocks"]
# render function and module
- t.root_render_func = namespace['root']
+ t.root_render_func = namespace["root"]
t._module = None
# debug and loader helpers
- t._debug_info = namespace['debug_info']
+ t._debug_info = namespace["debug_info"]
t._uptodate = None
# store the reference
- namespace['environment'] = environment
- namespace['__jinja_template__'] = t
+ namespace["environment"] = environment
+ namespace["__jinja_template__"] = t
return t
@@ -1004,8 +1087,7 @@ class Template(object):
try:
return concat(self.root_render_func(self.new_context(vars)))
except Exception:
- exc_info = sys.exc_info()
- return self.environment.handle_exception(exc_info, True)
+ self.environment.handle_exception()
def render_async(self, *args, **kwargs):
"""This works similar to :meth:`render` but returns a coroutine
@@ -1017,8 +1099,9 @@ class Template(object):
await template.render_async(knights='that say nih; asynchronously')
"""
# see asyncsupport for the actual implementation
- raise NotImplementedError('This feature is not available for this '
- 'version of Python')
+ raise NotImplementedError(
+ "This feature is not available for this version of Python"
+ )
def stream(self, *args, **kwargs):
"""Works exactly like :meth:`generate` but returns a
@@ -1039,29 +1122,28 @@ class Template(object):
for event in self.root_render_func(self.new_context(vars)):
yield event
except Exception:
- exc_info = sys.exc_info()
- else:
- return
- yield self.environment.handle_exception(exc_info, True)
+ yield self.environment.handle_exception()
def generate_async(self, *args, **kwargs):
"""An async version of :meth:`generate`. Works very similarly but
returns an async iterator instead.
"""
# see asyncsupport for the actual implementation
- raise NotImplementedError('This feature is not available for this '
- 'version of Python')
+ raise NotImplementedError(
+ "This feature is not available for this version of Python"
+ )
def new_context(self, vars=None, shared=False, locals=None):
"""Create a new :class:`Context` for this template. The vars
provided will be passed to the template. Per default the globals
are added to the context. If shared is set to `True` the data
- is passed as it to the context without adding the globals.
+ is passed as is to the context without adding the globals.
`locals` can be a dict of local variables for internal usage.
"""
- return new_context(self.environment, self.name, self.blocks,
- vars, shared, self.globals, locals)
+ return new_context(
+ self.environment, self.name, self.blocks, vars, shared, self.globals, locals
+ )
def make_module(self, vars=None, shared=False, locals=None):
"""This method works like the :attr:`module` attribute when called
@@ -1074,13 +1156,14 @@ class Template(object):
def make_module_async(self, vars=None, shared=False, locals=None):
"""As template module creation can invoke template code for
- asynchronous exections this method must be used instead of the
+ asynchronous executions this method must be used instead of the
normal :meth:`make_module` one. Likewise the module attribute
becomes unavailable in async mode.
"""
# see asyncsupport for the actual implementation
- raise NotImplementedError('This feature is not available for this '
- 'version of Python')
+ raise NotImplementedError(
+ "This feature is not available for this version of Python"
+ )
@internalcode
def _get_default_module(self):
@@ -1124,15 +1207,16 @@ class Template(object):
@property
def debug_info(self):
"""The debug info mapping."""
- return [tuple(imap(int, x.split('='))) for x in
- self._debug_info.split('&')]
+ if self._debug_info:
+ return [tuple(map(int, x.split("="))) for x in self._debug_info.split("&")]
+ return []
def __repr__(self):
if self.name is None:
- name = 'memory:%x' % id(self)
+ name = "memory:%x" % id(self)
else:
name = repr(self.name)
- return '<%s %s>' % (self.__class__.__name__, name)
+ return "<%s %s>" % (self.__class__.__name__, name)
@implements_to_string
@@ -1145,10 +1229,12 @@ class TemplateModule(object):
def __init__(self, template, context, body_stream=None):
if body_stream is None:
if context.environment.is_async:
- raise RuntimeError('Async mode requires a body stream '
- 'to be passed to a template module. Use '
- 'the async methods of the API you are '
- 'using.')
+ raise RuntimeError(
+ "Async mode requires a body stream "
+ "to be passed to a template module. Use "
+ "the async methods of the API you are "
+ "using."
+ )
body_stream = list(template.root_render_func(context))
self._body_stream = body_stream
self.__dict__.update(context.get_exported())
@@ -1162,10 +1248,10 @@ class TemplateModule(object):
def __repr__(self):
if self.__name__ is None:
- name = 'memory:%x' % id(self)
+ name = "memory:%x" % id(self)
else:
name = repr(self.__name__)
- return '<%s %s>' % (self.__class__.__name__, name)
+ return "<%s %s>" % (self.__class__.__name__, name)
class TemplateExpression(object):
@@ -1181,7 +1267,7 @@ class TemplateExpression(object):
def __call__(self, *args, **kwargs):
context = self._template.new_context(dict(*args, **kwargs))
consume(self._template.root_render_func(context))
- rv = context.vars['result']
+ rv = context.vars["result"]
if self._undefined_to_none and isinstance(rv, Undefined):
rv = None
return rv
@@ -1203,7 +1289,7 @@ class TemplateStream(object):
self._gen = gen
self.disable_buffering()
- def dump(self, fp, encoding=None, errors='strict'):
+ def dump(self, fp, encoding=None, errors="strict"):
"""Dump the complete stream into a file or file-like object.
Per default unicode strings are written, if you want to encode
before writing specify an `encoding`.
@@ -1215,15 +1301,15 @@ class TemplateStream(object):
close = False
if isinstance(fp, string_types):
if encoding is None:
- encoding = 'utf-8'
- fp = open(fp, 'wb')
+ encoding = "utf-8"
+ fp = open(fp, "wb")
close = True
try:
if encoding is not None:
iterable = (x.encode(encoding, errors) for x in self)
else:
iterable = self
- if hasattr(fp, 'writelines'):
+ if hasattr(fp, "writelines"):
fp.writelines(iterable)
else:
for item in iterable:
@@ -1259,7 +1345,7 @@ class TemplateStream(object):
def enable_buffering(self, size=5):
"""Enable buffering. Buffer `size` items before yielding them."""
if size <= 1:
- raise ValueError('buffer size too small')
+ raise ValueError("buffer size too small")
self.buffered = True
self._next = partial(next, self._buffered_generator(size))
diff --git a/lib/spack/external/jinja2/exceptions.py b/lib/spack/external/jinja2/exceptions.py
index c018a33e32..0bf2003e30 100644
--- a/lib/spack/external/jinja2/exceptions.py
+++ b/lib/spack/external/jinja2/exceptions.py
@@ -1,23 +1,18 @@
# -*- coding: utf-8 -*-
-"""
- jinja2.exceptions
- ~~~~~~~~~~~~~~~~~
-
- Jinja exceptions.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
-"""
-from jinja2._compat import imap, text_type, PY2, implements_to_string
+from ._compat import imap
+from ._compat import implements_to_string
+from ._compat import PY2
+from ._compat import text_type
class TemplateError(Exception):
"""Baseclass for all template errors."""
if PY2:
+
def __init__(self, message=None):
if message is not None:
- message = text_type(message).encode('utf-8')
+ message = text_type(message).encode("utf-8")
Exception.__init__(self, message)
@property
@@ -25,11 +20,13 @@ class TemplateError(Exception):
if self.args:
message = self.args[0]
if message is not None:
- return message.decode('utf-8', 'replace')
+ return message.decode("utf-8", "replace")
def __unicode__(self):
- return self.message or u''
+ return self.message or u""
+
else:
+
def __init__(self, message=None):
Exception.__init__(self, message)
@@ -43,16 +40,28 @@ class TemplateError(Exception):
@implements_to_string
class TemplateNotFound(IOError, LookupError, TemplateError):
- """Raised if a template does not exist."""
+ """Raised if a template does not exist.
+
+ .. versionchanged:: 2.11
+ If the given name is :class:`Undefined` and no message was
+ provided, an :exc:`UndefinedError` is raised.
+ """
# looks weird, but removes the warning descriptor that just
# bogusly warns us about message being deprecated
message = None
def __init__(self, name, message=None):
- IOError.__init__(self)
+ IOError.__init__(self, name)
+
if message is None:
+ from .runtime import Undefined
+
+ if isinstance(name, Undefined):
+ name._fail_with_undefined_error()
+
message = name
+
self.message = message
self.name = name
self.templates = [name]
@@ -66,13 +75,28 @@ class TemplatesNotFound(TemplateNotFound):
are selected. This is a subclass of :class:`TemplateNotFound`
exception, so just catching the base exception will catch both.
+ .. versionchanged:: 2.11
+ If a name in the list of names is :class:`Undefined`, a message
+ about it being undefined is shown rather than the empty string.
+
.. versionadded:: 2.2
"""
def __init__(self, names=(), message=None):
if message is None:
- message = u'none of the templates given were found: ' + \
- u', '.join(imap(text_type, names))
+ from .runtime import Undefined
+
+ parts = []
+
+ for name in names:
+ if isinstance(name, Undefined):
+ parts.append(name._undefined_message)
+ else:
+ parts.append(name)
+
+ message = u"none of the templates given were found: " + u", ".join(
+ imap(text_type, parts)
+ )
TemplateNotFound.__init__(self, names and names[-1] or None, message)
self.templates = list(names)
@@ -98,11 +122,11 @@ class TemplateSyntaxError(TemplateError):
return self.message
# otherwise attach some stuff
- location = 'line %d' % self.lineno
+ location = "line %d" % self.lineno
name = self.filename or self.name
if name:
location = 'File "%s", %s' % (name, location)
- lines = [self.message, ' ' + location]
+ lines = [self.message, " " + location]
# if the source is set, add the line to the output
if self.source is not None:
@@ -111,9 +135,16 @@ class TemplateSyntaxError(TemplateError):
except IndexError:
line = None
if line:
- lines.append(' ' + line.strip())
+ lines.append(" " + line.strip())
+
+ return u"\n".join(lines)
- return u'\n'.join(lines)
+ def __reduce__(self):
+ # https://bugs.python.org/issue1692335 Exceptions that take
+ # multiple required arguments have problems with pickling.
+ # Without this, raises TypeError: __init__() missing 1 required
+ # positional argument: 'lineno'
+ return self.__class__, (self.message, self.lineno, self.name, self.filename)
class TemplateAssertionError(TemplateSyntaxError):
diff --git a/lib/spack/external/jinja2/ext.py b/lib/spack/external/jinja2/ext.py
index 0734a84f73..9141be4dac 100644
--- a/lib/spack/external/jinja2/ext.py
+++ b/lib/spack/external/jinja2/ext.py
@@ -1,42 +1,49 @@
# -*- coding: utf-8 -*-
-"""
- jinja2.ext
- ~~~~~~~~~~
-
- Jinja extensions allow to add custom tags similar to the way django custom
- tags work. By default two example extensions exist: an i18n and a cache
- extension.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD.
-"""
+"""Extension API for adding custom tags and behavior."""
+import pprint
import re
-
-from jinja2 import nodes
-from jinja2.defaults import BLOCK_START_STRING, \
- BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \
- COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \
- LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \
- KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS
-from jinja2.environment import Environment
-from jinja2.runtime import concat
-from jinja2.exceptions import TemplateAssertionError, TemplateSyntaxError
-from jinja2.utils import contextfunction, import_string, Markup
-from jinja2._compat import with_metaclass, string_types, iteritems
-
+from sys import version_info
+
+from markupsafe import Markup
+
+from . import nodes
+from ._compat import iteritems
+from ._compat import string_types
+from ._compat import with_metaclass
+from .defaults import BLOCK_END_STRING
+from .defaults import BLOCK_START_STRING
+from .defaults import COMMENT_END_STRING
+from .defaults import COMMENT_START_STRING
+from .defaults import KEEP_TRAILING_NEWLINE
+from .defaults import LINE_COMMENT_PREFIX
+from .defaults import LINE_STATEMENT_PREFIX
+from .defaults import LSTRIP_BLOCKS
+from .defaults import NEWLINE_SEQUENCE
+from .defaults import TRIM_BLOCKS
+from .defaults import VARIABLE_END_STRING
+from .defaults import VARIABLE_START_STRING
+from .environment import Environment
+from .exceptions import TemplateAssertionError
+from .exceptions import TemplateSyntaxError
+from .nodes import ContextReference
+from .runtime import concat
+from .utils import contextfunction
+from .utils import import_string
# the only real useful gettext functions for a Jinja template. Note
# that ugettext must be assigned to gettext as Jinja doesn't support
# non unicode strings.
-GETTEXT_FUNCTIONS = ('_', 'gettext', 'ngettext')
+GETTEXT_FUNCTIONS = ("_", "gettext", "ngettext")
+
+_ws_re = re.compile(r"\s*\n\s*")
class ExtensionRegistry(type):
"""Gives the extension an unique identifier."""
- def __new__(cls, name, bases, d):
- rv = type.__new__(cls, name, bases, d)
- rv.identifier = rv.__module__ + '.' + rv.__name__
+ def __new__(mcs, name, bases, d):
+ rv = type.__new__(mcs, name, bases, d)
+ rv.identifier = rv.__module__ + "." + rv.__name__
return rv
@@ -91,10 +98,6 @@ class Extension(with_metaclass(ExtensionRegistry, object)):
to filter tokens returned. This method has to return an iterable of
:class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a
:class:`~jinja2.lexer.TokenStream`.
-
- In the `ext` folder of the Jinja2 source distribution there is a file
- called `inlinegettext.py` which implements a filter that utilizes this
- method.
"""
return stream
@@ -116,8 +119,9 @@ class Extension(with_metaclass(ExtensionRegistry, object)):
"""
return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
- def call_method(self, name, args=None, kwargs=None, dyn_args=None,
- dyn_kwargs=None, lineno=None):
+ def call_method(
+ self, name, args=None, kwargs=None, dyn_args=None, dyn_kwargs=None, lineno=None
+ ):
"""Call a method of the extension. This is a shortcut for
:meth:`attr` + :class:`jinja2.nodes.Call`.
"""
@@ -125,13 +129,19 @@ class Extension(with_metaclass(ExtensionRegistry, object)):
args = []
if kwargs is None:
kwargs = []
- return nodes.Call(self.attr(name, lineno=lineno), args, kwargs,
- dyn_args, dyn_kwargs, lineno=lineno)
+ return nodes.Call(
+ self.attr(name, lineno=lineno),
+ args,
+ kwargs,
+ dyn_args,
+ dyn_kwargs,
+ lineno=lineno,
+ )
@contextfunction
def _gettext_alias(__context, *args, **kwargs):
- return __context.call(__context.resolve('gettext'), *args, **kwargs)
+ return __context.call(__context.resolve("gettext"), *args, **kwargs)
def _make_new_gettext(func):
@@ -140,24 +150,31 @@ def _make_new_gettext(func):
rv = __context.call(func, __string)
if __context.eval_ctx.autoescape:
rv = Markup(rv)
+ # Always treat as a format string, even if there are no
+ # variables. This makes translation strings more consistent
+ # and predictable. This requires escaping
return rv % variables
+
return gettext
def _make_new_ngettext(func):
@contextfunction
def ngettext(__context, __singular, __plural, __num, **variables):
- variables.setdefault('num', __num)
+ variables.setdefault("num", __num)
rv = __context.call(func, __singular, __plural, __num)
if __context.eval_ctx.autoescape:
rv = Markup(rv)
+ # Always treat as a format string, see gettext comment above.
return rv % variables
+
return ngettext
class InternationalizationExtension(Extension):
- """This extension adds gettext support to Jinja2."""
- tags = set(['trans'])
+ """This extension adds gettext support to Jinja."""
+
+ tags = {"trans"}
# TODO: the i18n extension is currently reevaluating values in a few
# situations. Take this example:
@@ -168,30 +185,28 @@ class InternationalizationExtension(Extension):
def __init__(self, environment):
Extension.__init__(self, environment)
- environment.globals['_'] = _gettext_alias
+ environment.globals["_"] = _gettext_alias
environment.extend(
install_gettext_translations=self._install,
install_null_translations=self._install_null,
install_gettext_callables=self._install_callables,
uninstall_gettext_translations=self._uninstall,
extract_translations=self._extract,
- newstyle_gettext=False
+ newstyle_gettext=False,
)
def _install(self, translations, newstyle=None):
- gettext = getattr(translations, 'ugettext', None)
+ gettext = getattr(translations, "ugettext", None)
if gettext is None:
gettext = translations.gettext
- ngettext = getattr(translations, 'ungettext', None)
+ ngettext = getattr(translations, "ungettext", None)
if ngettext is None:
ngettext = translations.ngettext
self._install_callables(gettext, ngettext, newstyle)
def _install_null(self, newstyle=None):
self._install_callables(
- lambda x: x,
- lambda s, p, n: (n != 1 and (p,) or (s,))[0],
- newstyle
+ lambda x: x, lambda s, p, n: (n != 1 and (p,) or (s,))[0], newstyle
)
def _install_callables(self, gettext, ngettext, newstyle=None):
@@ -200,13 +215,10 @@ class InternationalizationExtension(Extension):
if self.environment.newstyle_gettext:
gettext = _make_new_gettext(gettext)
ngettext = _make_new_ngettext(ngettext)
- self.environment.globals.update(
- gettext=gettext,
- ngettext=ngettext
- )
+ self.environment.globals.update(gettext=gettext, ngettext=ngettext)
def _uninstall(self, translations):
- for key in 'gettext', 'ngettext':
+ for key in "gettext", "ngettext":
self.environment.globals.pop(key, None)
def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS):
@@ -226,41 +238,44 @@ class InternationalizationExtension(Extension):
plural_expr_assignment = None
variables = {}
trimmed = None
- while parser.stream.current.type != 'block_end':
+ while parser.stream.current.type != "block_end":
if variables:
- parser.stream.expect('comma')
+ parser.stream.expect("comma")
# skip colon for python compatibility
- if parser.stream.skip_if('colon'):
+ if parser.stream.skip_if("colon"):
break
- name = parser.stream.expect('name')
+ name = parser.stream.expect("name")
if name.value in variables:
- parser.fail('translatable variable %r defined twice.' %
- name.value, name.lineno,
- exc=TemplateAssertionError)
+ parser.fail(
+ "translatable variable %r defined twice." % name.value,
+ name.lineno,
+ exc=TemplateAssertionError,
+ )
# expressions
- if parser.stream.current.type == 'assign':
+ if parser.stream.current.type == "assign":
next(parser.stream)
variables[name.value] = var = parser.parse_expression()
- elif trimmed is None and name.value in ('trimmed', 'notrimmed'):
- trimmed = name.value == 'trimmed'
+ elif trimmed is None and name.value in ("trimmed", "notrimmed"):
+ trimmed = name.value == "trimmed"
continue
else:
- variables[name.value] = var = nodes.Name(name.value, 'load')
+ variables[name.value] = var = nodes.Name(name.value, "load")
if plural_expr is None:
if isinstance(var, nodes.Call):
- plural_expr = nodes.Name('_trans', 'load')
+ plural_expr = nodes.Name("_trans", "load")
variables[name.value] = plural_expr
plural_expr_assignment = nodes.Assign(
- nodes.Name('_trans', 'store'), var)
+ nodes.Name("_trans", "store"), var
+ )
else:
plural_expr = var
- num_called_num = name.value == 'num'
+ num_called_num = name.value == "num"
- parser.stream.expect('block_end')
+ parser.stream.expect("block_end")
plural = None
have_plural = False
@@ -271,22 +286,24 @@ class InternationalizationExtension(Extension):
if singular_names:
referenced.update(singular_names)
if plural_expr is None:
- plural_expr = nodes.Name(singular_names[0], 'load')
- num_called_num = singular_names[0] == 'num'
+ plural_expr = nodes.Name(singular_names[0], "load")
+ num_called_num = singular_names[0] == "num"
# if we have a pluralize block, we parse that too
- if parser.stream.current.test('name:pluralize'):
+ if parser.stream.current.test("name:pluralize"):
have_plural = True
next(parser.stream)
- if parser.stream.current.type != 'block_end':
- name = parser.stream.expect('name')
+ if parser.stream.current.type != "block_end":
+ name = parser.stream.expect("name")
if name.value not in variables:
- parser.fail('unknown variable %r for pluralization' %
- name.value, name.lineno,
- exc=TemplateAssertionError)
+ parser.fail(
+ "unknown variable %r for pluralization" % name.value,
+ name.lineno,
+ exc=TemplateAssertionError,
+ )
plural_expr = variables[name.value]
- num_called_num = name.value == 'num'
- parser.stream.expect('block_end')
+ num_called_num = name.value == "num"
+ parser.stream.expect("block_end")
plural_names, plural = self._parse_block(parser, False)
next(parser.stream)
referenced.update(plural_names)
@@ -296,88 +313,97 @@ class InternationalizationExtension(Extension):
# register free names as simple name expressions
for var in referenced:
if var not in variables:
- variables[var] = nodes.Name(var, 'load')
+ variables[var] = nodes.Name(var, "load")
if not have_plural:
plural_expr = None
elif plural_expr is None:
- parser.fail('pluralize without variables', lineno)
+ parser.fail("pluralize without variables", lineno)
if trimmed is None:
- trimmed = self.environment.policies['ext.i18n.trimmed']
+ trimmed = self.environment.policies["ext.i18n.trimmed"]
if trimmed:
singular = self._trim_whitespace(singular)
if plural:
plural = self._trim_whitespace(plural)
- node = self._make_node(singular, plural, variables, plural_expr,
- bool(referenced),
- num_called_num and have_plural)
+ node = self._make_node(
+ singular,
+ plural,
+ variables,
+ plural_expr,
+ bool(referenced),
+ num_called_num and have_plural,
+ )
node.set_lineno(lineno)
if plural_expr_assignment is not None:
return [plural_expr_assignment, node]
else:
return node
- def _trim_whitespace(self, string, _ws_re=re.compile(r'\s*\n\s*')):
- return _ws_re.sub(' ', string.strip())
+ def _trim_whitespace(self, string, _ws_re=_ws_re):
+ return _ws_re.sub(" ", string.strip())
def _parse_block(self, parser, allow_pluralize):
"""Parse until the next block tag with a given name."""
referenced = []
buf = []
while 1:
- if parser.stream.current.type == 'data':
- buf.append(parser.stream.current.value.replace('%', '%%'))
+ if parser.stream.current.type == "data":
+ buf.append(parser.stream.current.value.replace("%", "%%"))
next(parser.stream)
- elif parser.stream.current.type == 'variable_begin':
+ elif parser.stream.current.type == "variable_begin":
next(parser.stream)
- name = parser.stream.expect('name').value
+ name = parser.stream.expect("name").value
referenced.append(name)
- buf.append('%%(%s)s' % name)
- parser.stream.expect('variable_end')
- elif parser.stream.current.type == 'block_begin':
+ buf.append("%%(%s)s" % name)
+ parser.stream.expect("variable_end")
+ elif parser.stream.current.type == "block_begin":
next(parser.stream)
- if parser.stream.current.test('name:endtrans'):
+ if parser.stream.current.test("name:endtrans"):
break
- elif parser.stream.current.test('name:pluralize'):
+ elif parser.stream.current.test("name:pluralize"):
if allow_pluralize:
break
- parser.fail('a translatable section can have only one '
- 'pluralize section')
- parser.fail('control structures in translatable sections are '
- 'not allowed')
+ parser.fail(
+ "a translatable section can have only one pluralize section"
+ )
+ parser.fail(
+ "control structures in translatable sections are not allowed"
+ )
elif parser.stream.eos:
- parser.fail('unclosed translation block')
+ parser.fail("unclosed translation block")
else:
- assert False, 'internal parser error'
+ raise RuntimeError("internal parser error")
return referenced, concat(buf)
- def _make_node(self, singular, plural, variables, plural_expr,
- vars_referenced, num_called_num):
+ def _make_node(
+ self, singular, plural, variables, plural_expr, vars_referenced, num_called_num
+ ):
"""Generates a useful node from the data provided."""
# no variables referenced? no need to escape for old style
# gettext invocations only if there are vars.
if not vars_referenced and not self.environment.newstyle_gettext:
- singular = singular.replace('%%', '%')
+ singular = singular.replace("%%", "%")
if plural:
- plural = plural.replace('%%', '%')
+ plural = plural.replace("%%", "%")
# singular only:
if plural_expr is None:
- gettext = nodes.Name('gettext', 'load')
- node = nodes.Call(gettext, [nodes.Const(singular)],
- [], None, None)
+ gettext = nodes.Name("gettext", "load")
+ node = nodes.Call(gettext, [nodes.Const(singular)], [], None, None)
# singular and plural
else:
- ngettext = nodes.Name('ngettext', 'load')
- node = nodes.Call(ngettext, [
- nodes.Const(singular),
- nodes.Const(plural),
- plural_expr
- ], [], None, None)
+ ngettext = nodes.Name("ngettext", "load")
+ node = nodes.Call(
+ ngettext,
+ [nodes.Const(singular), nodes.Const(plural), plural_expr],
+ [],
+ None,
+ None,
+ )
# in case newstyle gettext is used, the method is powerful
# enough to handle the variable expansion and autoescape
@@ -386,7 +412,7 @@ class InternationalizationExtension(Extension):
for key, value in iteritems(variables):
# the function adds that later anyways in case num was
# called num, so just skip it.
- if num_called_num and key == 'num':
+ if num_called_num and key == "num":
continue
node.kwargs.append(nodes.Keyword(key, value))
@@ -396,18 +422,24 @@ class InternationalizationExtension(Extension):
# environment with autoescaping turned on
node = nodes.MarkSafeIfAutoescape(node)
if variables:
- node = nodes.Mod(node, nodes.Dict([
- nodes.Pair(nodes.Const(key), value)
- for key, value in variables.items()
- ]))
+ node = nodes.Mod(
+ node,
+ nodes.Dict(
+ [
+ nodes.Pair(nodes.Const(key), value)
+ for key, value in variables.items()
+ ]
+ ),
+ )
return nodes.Output([node])
class ExprStmtExtension(Extension):
- """Adds a `do` tag to Jinja2 that works like the print statement just
+ """Adds a `do` tag to Jinja that works like the print statement just
that it doesn't print the return value.
"""
- tags = set(['do'])
+
+ tags = set(["do"])
def parse(self, parser):
node = nodes.ExprStmt(lineno=next(parser.stream).lineno)
@@ -417,11 +449,12 @@ class ExprStmtExtension(Extension):
class LoopControlExtension(Extension):
"""Adds break and continue to the template engine."""
- tags = set(['break', 'continue'])
+
+ tags = set(["break", "continue"])
def parse(self, parser):
token = next(parser.stream)
- if token.value == 'break':
+ if token.value == "break":
return nodes.Break(lineno=token.lineno)
return nodes.Continue(lineno=token.lineno)
@@ -434,8 +467,50 @@ class AutoEscapeExtension(Extension):
pass
-def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS,
- babel_style=True):
+class DebugExtension(Extension):
+ """A ``{% debug %}`` tag that dumps the available variables,
+ filters, and tests.
+
+ .. code-block:: html+jinja
+
+ <pre>{% debug %}</pre>
+
+ .. code-block:: text
+
+ {'context': {'cycler': <class 'jinja2.utils.Cycler'>,
+ ...,
+ 'namespace': <class 'jinja2.utils.Namespace'>},
+ 'filters': ['abs', 'attr', 'batch', 'capitalize', 'center', 'count', 'd',
+ ..., 'urlencode', 'urlize', 'wordcount', 'wordwrap', 'xmlattr'],
+ 'tests': ['!=', '<', '<=', '==', '>', '>=', 'callable', 'defined',
+ ..., 'odd', 'sameas', 'sequence', 'string', 'undefined', 'upper']}
+
+ .. versionadded:: 2.11.0
+ """
+
+ tags = {"debug"}
+
+ def parse(self, parser):
+ lineno = parser.stream.expect("name:debug").lineno
+ context = ContextReference()
+ result = self.call_method("_render", [context], lineno=lineno)
+ return nodes.Output([result], lineno=lineno)
+
+ def _render(self, context):
+ result = {
+ "context": context.get_all(),
+ "filters": sorted(self.environment.filters.keys()),
+ "tests": sorted(self.environment.tests.keys()),
+ }
+
+ # Set the depth since the intent is to show the top few names.
+ if version_info[:2] >= (3, 4):
+ return pprint.pformat(result, depth=3, compact=True)
+ else:
+ return pprint.pformat(result, depth=3)
+
+
+def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS, babel_style=True):
"""Extract localizable strings from the given template node. Per
default this function returns matches in babel style that means non string
parameters as well as keyword arguments are returned as `None`. This
@@ -471,19 +546,20 @@ def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS,
extraction interface or extract comments yourself.
"""
for node in node.find_all(nodes.Call):
- if not isinstance(node.node, nodes.Name) or \
- node.node.name not in gettext_functions:
+ if (
+ not isinstance(node.node, nodes.Name)
+ or node.node.name not in gettext_functions
+ ):
continue
strings = []
for arg in node.args:
- if isinstance(arg, nodes.Const) and \
- isinstance(arg.value, string_types):
+ if isinstance(arg, nodes.Const) and isinstance(arg.value, string_types):
strings.append(arg.value)
else:
strings.append(None)
- for arg in node.kwargs:
+ for _ in node.kwargs:
strings.append(None)
if node.dyn_args is not None:
strings.append(None)
@@ -517,9 +593,10 @@ class _CommentFinder(object):
def find_backwards(self, offset):
try:
- for _, token_type, token_value in \
- reversed(self.tokens[self.offset:offset]):
- if token_type in ('comment', 'linecomment'):
+ for _, token_type, token_value in reversed(
+ self.tokens[self.offset : offset]
+ ):
+ if token_type in ("comment", "linecomment"):
try:
prefix, comment = token_value.split(None, 1)
except ValueError:
@@ -533,7 +610,7 @@ class _CommentFinder(object):
def find_comments(self, lineno):
if not self.comment_tags or self.last_lineno > lineno:
return []
- for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset:]):
+ for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset :]):
if token_lineno > lineno:
return self.find_backwards(self.offset + idx)
return self.find_backwards(len(self.tokens))
@@ -545,7 +622,7 @@ def babel_extract(fileobj, keywords, comment_tags, options):
.. versionchanged:: 2.3
Basic support for translation comments was added. If `comment_tags`
is now set to a list of keywords for extraction, the extractor will
- try to find the best preceeding comment that begins with one of the
+ try to find the best preceding comment that begins with one of the
keywords. For best results, make sure to not have more than one
gettext call in one line of code and the matching comment in the
same line or the line before.
@@ -568,7 +645,7 @@ def babel_extract(fileobj, keywords, comment_tags, options):
(comments will be empty currently)
"""
extensions = set()
- for extension in options.get('extensions', '').split(','):
+ for extension in options.get("extensions", "").split(","):
extension = extension.strip()
if not extension:
continue
@@ -577,38 +654,37 @@ def babel_extract(fileobj, keywords, comment_tags, options):
extensions.add(InternationalizationExtension)
def getbool(options, key, default=False):
- return options.get(key, str(default)).lower() in \
- ('1', 'on', 'yes', 'true')
+ return options.get(key, str(default)).lower() in ("1", "on", "yes", "true")
- silent = getbool(options, 'silent', True)
+ silent = getbool(options, "silent", True)
environment = Environment(
- options.get('block_start_string', BLOCK_START_STRING),
- options.get('block_end_string', BLOCK_END_STRING),
- options.get('variable_start_string', VARIABLE_START_STRING),
- options.get('variable_end_string', VARIABLE_END_STRING),
- options.get('comment_start_string', COMMENT_START_STRING),
- options.get('comment_end_string', COMMENT_END_STRING),
- options.get('line_statement_prefix') or LINE_STATEMENT_PREFIX,
- options.get('line_comment_prefix') or LINE_COMMENT_PREFIX,
- getbool(options, 'trim_blocks', TRIM_BLOCKS),
- getbool(options, 'lstrip_blocks', LSTRIP_BLOCKS),
+ options.get("block_start_string", BLOCK_START_STRING),
+ options.get("block_end_string", BLOCK_END_STRING),
+ options.get("variable_start_string", VARIABLE_START_STRING),
+ options.get("variable_end_string", VARIABLE_END_STRING),
+ options.get("comment_start_string", COMMENT_START_STRING),
+ options.get("comment_end_string", COMMENT_END_STRING),
+ options.get("line_statement_prefix") or LINE_STATEMENT_PREFIX,
+ options.get("line_comment_prefix") or LINE_COMMENT_PREFIX,
+ getbool(options, "trim_blocks", TRIM_BLOCKS),
+ getbool(options, "lstrip_blocks", LSTRIP_BLOCKS),
NEWLINE_SEQUENCE,
- getbool(options, 'keep_trailing_newline', KEEP_TRAILING_NEWLINE),
+ getbool(options, "keep_trailing_newline", KEEP_TRAILING_NEWLINE),
frozenset(extensions),
cache_size=0,
- auto_reload=False
+ auto_reload=False,
)
- if getbool(options, 'trimmed'):
- environment.policies['ext.i18n.trimmed'] = True
- if getbool(options, 'newstyle_gettext'):
+ if getbool(options, "trimmed"):
+ environment.policies["ext.i18n.trimmed"] = True
+ if getbool(options, "newstyle_gettext"):
environment.newstyle_gettext = True
- source = fileobj.read().decode(options.get('encoding', 'utf-8'))
+ source = fileobj.read().decode(options.get("encoding", "utf-8"))
try:
node = environment.parse(source)
tokens = list(environment.lex(environment.preprocess(source)))
- except TemplateSyntaxError as e:
+ except TemplateSyntaxError:
if not silent:
raise
# skip templates with syntax errors
@@ -625,3 +701,4 @@ do = ExprStmtExtension
loopcontrols = LoopControlExtension
with_ = WithExtension
autoescape = AutoEscapeExtension
+debug = DebugExtension
diff --git a/lib/spack/external/jinja2/filters.py b/lib/spack/external/jinja2/filters.py
index 267ddddaa0..74b108dcec 100644
--- a/lib/spack/external/jinja2/filters.py
+++ b/lib/spack/external/jinja2/filters.py
@@ -1,29 +1,31 @@
# -*- coding: utf-8 -*-
-"""
- jinja2.filters
- ~~~~~~~~~~~~~~
-
- Bundled jinja filters.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
-"""
-import re
+"""Built-in template filters used with the ``|`` operator."""
import math
import random
+import re
import warnings
-
-from itertools import groupby, chain
from collections import namedtuple
-from jinja2.utils import Markup, escape, pformat, urlize, soft_unicode, \
- unicode_urlencode, htmlsafe_json_dumps
-from jinja2.runtime import Undefined
-from jinja2.exceptions import FilterArgumentError
-from jinja2._compat import imap, string_types, text_type, iteritems, PY2
+from itertools import chain
+from itertools import groupby
+
+from markupsafe import escape
+from markupsafe import Markup
+from markupsafe import soft_unicode
+from ._compat import abc
+from ._compat import imap
+from ._compat import iteritems
+from ._compat import string_types
+from ._compat import text_type
+from .exceptions import FilterArgumentError
+from .runtime import Undefined
+from .utils import htmlsafe_json_dumps
+from .utils import pformat
+from .utils import unicode_urlencode
+from .utils import urlize
-_word_re = re.compile(r'\w+', re.UNICODE)
-_word_beginning_split_re = re.compile(r'([-\s\(\{\[\<]+)', re.UNICODE)
+_word_re = re.compile(r"\w+", re.UNICODE)
+_word_beginning_split_re = re.compile(r"([-\s\(\{\[\<]+)", re.UNICODE)
def contextfilter(f):
@@ -59,23 +61,21 @@ def ignore_case(value):
return value.lower() if isinstance(value, string_types) else value
-def make_attrgetter(environment, attribute, postprocess=None):
+def make_attrgetter(environment, attribute, postprocess=None, default=None):
"""Returns a callable that looks up the given attribute from a
passed object with the rules of the environment. Dots are allowed
to access attributes of attributes. Integer parts in paths are
looked up as integers.
"""
- if attribute is None:
- attribute = []
- elif isinstance(attribute, string_types):
- attribute = [int(x) if x.isdigit() else x for x in attribute.split('.')]
- else:
- attribute = [attribute]
+ attribute = _prepare_attribute_parts(attribute)
def attrgetter(item):
for part in attribute:
item = environment.getitem(item, part)
+ if default and isinstance(item, Undefined):
+ item = default
+
if postprocess is not None:
item = postprocess(item)
@@ -84,32 +84,84 @@ def make_attrgetter(environment, attribute, postprocess=None):
return attrgetter
+def make_multi_attrgetter(environment, attribute, postprocess=None):
+ """Returns a callable that looks up the given comma separated
+ attributes from a passed object with the rules of the environment.
+ Dots are allowed to access attributes of each attribute. Integer
+ parts in paths are looked up as integers.
+
+ The value returned by the returned callable is a list of extracted
+ attribute values.
+
+ Examples of attribute: "attr1,attr2", "attr1.inner1.0,attr2.inner2.0", etc.
+ """
+ attribute_parts = (
+ attribute.split(",") if isinstance(attribute, string_types) else [attribute]
+ )
+ attribute = [
+ _prepare_attribute_parts(attribute_part) for attribute_part in attribute_parts
+ ]
+
+ def attrgetter(item):
+ items = [None] * len(attribute)
+ for i, attribute_part in enumerate(attribute):
+ item_i = item
+ for part in attribute_part:
+ item_i = environment.getitem(item_i, part)
+
+ if postprocess is not None:
+ item_i = postprocess(item_i)
+
+ items[i] = item_i
+ return items
+
+ return attrgetter
+
+
+def _prepare_attribute_parts(attr):
+ if attr is None:
+ return []
+ elif isinstance(attr, string_types):
+ return [int(x) if x.isdigit() else x for x in attr.split(".")]
+ else:
+ return [attr]
+
+
def do_forceescape(value):
"""Enforce HTML escaping. This will probably double escape variables."""
- if hasattr(value, '__html__'):
+ if hasattr(value, "__html__"):
value = value.__html__()
return escape(text_type(value))
def do_urlencode(value):
- """Escape strings for use in URLs (uses UTF-8 encoding). It accepts both
- dictionaries and regular strings as well as pairwise iterables.
+ """Quote data for use in a URL path or query using UTF-8.
+
+ Basic wrapper around :func:`urllib.parse.quote` when given a
+ string, or :func:`urllib.parse.urlencode` for a dict or iterable.
+
+ :param value: Data to quote. A string will be quoted directly. A
+ dict or iterable of ``(key, value)`` pairs will be joined as a
+ query string.
+
+ When given a string, "/" is not quoted. HTTP servers treat "/" and
+ "%2F" equivalently in paths. If you need quoted slashes, use the
+ ``|replace("/", "%2F")`` filter.
.. versionadded:: 2.7
"""
- itemiter = None
- if isinstance(value, dict):
- itemiter = iteritems(value)
- elif not isinstance(value, string_types):
- try:
- itemiter = iter(value)
- except TypeError:
- pass
- if itemiter is None:
+ if isinstance(value, string_types) or not isinstance(value, abc.Iterable):
return unicode_urlencode(value)
- return u'&'.join(unicode_urlencode(k) + '=' +
- unicode_urlencode(v, for_qs=True)
- for k, v in itemiter)
+
+ if isinstance(value, dict):
+ items = iteritems(value)
+ else:
+ items = iter(value)
+
+ return u"&".join(
+ "%s=%s" % (unicode_urlencode(k, for_qs=True), unicode_urlencode(v, for_qs=True))
+ for k, v in items
+ )
@evalcontextfilter
@@ -132,8 +184,11 @@ def do_replace(eval_ctx, s, old, new, count=None):
count = -1
if not eval_ctx.autoescape:
return text_type(s).replace(text_type(old), text_type(new), count)
- if hasattr(old, '__html__') or hasattr(new, '__html__') and \
- not hasattr(s, '__html__'):
+ if (
+ hasattr(old, "__html__")
+ or hasattr(new, "__html__")
+ and not hasattr(s, "__html__")
+ ):
s = escape(s)
else:
s = soft_unicode(s)
@@ -174,13 +229,13 @@ def do_xmlattr(_eval_ctx, d, autospace=True):
As you can see it automatically prepends a space in front of the item
if the filter returned something unless the second parameter is false.
"""
- rv = u' '.join(
+ rv = u" ".join(
u'%s="%s"' % (escape(key), escape(value))
for key, value in iteritems(d)
if value is not None and not isinstance(value, Undefined)
)
if autospace and rv:
- rv = u' ' + rv
+ rv = u" " + rv
if _eval_ctx.autoescape:
rv = Markup(rv)
return rv
@@ -197,39 +252,40 @@ def do_title(s):
"""Return a titlecased version of the value. I.e. words will start with
uppercase letters, all remaining characters are lowercase.
"""
- return ''.join(
- [item[0].upper() + item[1:].lower()
- for item in _word_beginning_split_re.split(soft_unicode(s))
- if item])
+ return "".join(
+ [
+ item[0].upper() + item[1:].lower()
+ for item in _word_beginning_split_re.split(soft_unicode(s))
+ if item
+ ]
+ )
-def do_dictsort(value, case_sensitive=False, by='key', reverse=False):
+def do_dictsort(value, case_sensitive=False, by="key", reverse=False):
"""Sort a dict and yield (key, value) pairs. Because python dicts are
unsorted you may want to use this function to order them by either
key or value:
.. sourcecode:: jinja
- {% for item in mydict|dictsort %}
+ {% for key, value in mydict|dictsort %}
sort the dict by key, case insensitive
- {% for item in mydict|dictsort(reverse=true) %}
+ {% for key, value in mydict|dictsort(reverse=true) %}
sort the dict by key, case insensitive, reverse order
- {% for item in mydict|dictsort(true) %}
+ {% for key, value in mydict|dictsort(true) %}
sort the dict by key, case sensitive
- {% for item in mydict|dictsort(false, 'value') %}
+ {% for key, value in mydict|dictsort(false, 'value') %}
sort the dict by value, case insensitive
"""
- if by == 'key':
+ if by == "key":
pos = 0
- elif by == 'value':
+ elif by == "value":
pos = 1
else:
- raise FilterArgumentError(
- 'You can only sort by either "key" or "value"'
- )
+ raise FilterArgumentError('You can only sort by either "key" or "value"')
def sort_func(item):
value = item[pos]
@@ -243,48 +299,62 @@ def do_dictsort(value, case_sensitive=False, by='key', reverse=False):
@environmentfilter
-def do_sort(
- environment, value, reverse=False, case_sensitive=False, attribute=None
-):
- """Sort an iterable. Per default it sorts ascending, if you pass it
- true as first argument it will reverse the sorting.
+def do_sort(environment, value, reverse=False, case_sensitive=False, attribute=None):
+ """Sort an iterable using Python's :func:`sorted`.
+
+ .. sourcecode:: jinja
+
+ {% for city in cities|sort %}
+ ...
+ {% endfor %}
- If the iterable is made of strings the third parameter can be used to
- control the case sensitiveness of the comparison which is disabled by
- default.
+ :param reverse: Sort descending instead of ascending.
+ :param case_sensitive: When sorting strings, sort upper and lower
+ case separately.
+ :param attribute: When sorting objects or dicts, an attribute or
+ key to sort by. Can use dot notation like ``"address.city"``.
+ Can be a list of attributes like ``"age,name"``.
+
+ The sort is stable, it does not change the relative order of
+ elements that compare equal. This makes it is possible to chain
+ sorts on different attributes and ordering.
.. sourcecode:: jinja
- {% for item in iterable|sort %}
+ {% for user in users|sort(attribute="name")
+ |sort(reverse=true, attribute="age") %}
...
{% endfor %}
- It is also possible to sort by an attribute (for example to sort
- by the date of an object) by specifying the `attribute` parameter:
+ As a shortcut to chaining when the direction is the same for all
+ attributes, pass a comma separate list of attributes.
.. sourcecode:: jinja
- {% for item in iterable|sort(attribute='date') %}
+ {% for user users|sort(attribute="age,name") %}
...
{% endfor %}
+ .. versionchanged:: 2.11.0
+ The ``attribute`` parameter can be a comma separated list of
+ attributes, e.g. ``"age,name"``.
+
.. versionchanged:: 2.6
- The `attribute` parameter was added.
+ The ``attribute`` parameter was added.
"""
- key_func = make_attrgetter(
- environment, attribute,
- postprocess=ignore_case if not case_sensitive else None
+ key_func = make_multi_attrgetter(
+ environment, attribute, postprocess=ignore_case if not case_sensitive else None
)
return sorted(value, key=key_func, reverse=reverse)
@environmentfilter
def do_unique(environment, value, case_sensitive=False, attribute=None):
- """Returns a list of unique items from the the given iterable.
+ """Returns a list of unique items from the given iterable.
.. sourcecode:: jinja
- {{ ['foo', 'bar', 'foobar', 'FooBar']|unique }}
+ {{ ['foo', 'bar', 'foobar', 'FooBar']|unique|list }}
-> ['foo', 'bar', 'foobar']
The unique items are yielded in the same order as their first occurrence in
@@ -294,8 +364,7 @@ def do_unique(environment, value, case_sensitive=False, attribute=None):
:param attribute: Filter objects with unique values for this attribute.
"""
getter = make_attrgetter(
- environment, attribute,
- postprocess=ignore_case if not case_sensitive else None
+ environment, attribute, postprocess=ignore_case if not case_sensitive else None
)
seen = set()
@@ -313,11 +382,10 @@ def _min_or_max(environment, value, func, case_sensitive, attribute):
try:
first = next(it)
except StopIteration:
- return environment.undefined('No aggregated item, sequence was empty.')
+ return environment.undefined("No aggregated item, sequence was empty.")
key_func = make_attrgetter(
- environment, attribute,
- ignore_case if not case_sensitive else None
+ environment, attribute, postprocess=ignore_case if not case_sensitive else None
)
return func(chain([first], it), key=key_func)
@@ -332,7 +400,7 @@ def do_min(environment, value, case_sensitive=False, attribute=None):
-> 1
:param case_sensitive: Treat upper and lower case strings as distinct.
- :param attribute: Get the object with the max value of this attribute.
+ :param attribute: Get the object with the min value of this attribute.
"""
return _min_or_max(environment, value, min, case_sensitive, attribute)
@@ -352,7 +420,7 @@ def do_max(environment, value, case_sensitive=False, attribute=None):
return _min_or_max(environment, value, max, case_sensitive, attribute)
-def do_default(value, default_value=u'', boolean=False):
+def do_default(value, default_value=u"", boolean=False):
"""If the value is undefined it will return the passed default value,
otherwise the value of the variable:
@@ -368,6 +436,12 @@ def do_default(value, default_value=u'', boolean=False):
.. sourcecode:: jinja
{{ ''|default('the string was empty', true) }}
+
+ .. versionchanged:: 2.11
+ It's now possible to configure the :class:`~jinja2.Environment` with
+ :class:`~jinja2.ChainableUndefined` to make the `default` filter work
+ on nested elements and attributes that may contain undefined values
+ in the chain without getting an :exc:`~jinja2.UndefinedError`.
"""
if isinstance(value, Undefined) or (boolean and not value):
return default_value
@@ -375,7 +449,7 @@ def do_default(value, default_value=u'', boolean=False):
@evalcontextfilter
-def do_join(eval_ctx, value, d=u'', attribute=None):
+def do_join(eval_ctx, value, d=u"", attribute=None):
"""Return a string which is the concatenation of the strings in the
sequence. The separator between elements is an empty string per
default, you can define it with the optional parameter:
@@ -400,17 +474,17 @@ def do_join(eval_ctx, value, d=u'', attribute=None):
if attribute is not None:
value = imap(make_attrgetter(eval_ctx.environment, attribute), value)
- # no automatic escaping? joining is a lot eaiser then
+ # no automatic escaping? joining is a lot easier then
if not eval_ctx.autoescape:
return text_type(d).join(imap(text_type, value))
# if the delimiter doesn't have an html representation we check
# if any of the items has. If yes we do a coercion to Markup
- if not hasattr(d, '__html__'):
+ if not hasattr(d, "__html__"):
value = list(value)
do_escape = False
for idx, item in enumerate(value):
- if hasattr(item, '__html__'):
+ if hasattr(item, "__html__"):
do_escape = True
else:
value[idx] = text_type(item)
@@ -435,16 +509,25 @@ def do_first(environment, seq):
try:
return next(iter(seq))
except StopIteration:
- return environment.undefined('No first item, sequence was empty.')
+ return environment.undefined("No first item, sequence was empty.")
@environmentfilter
def do_last(environment, seq):
- """Return the last item of a sequence."""
+ """
+ Return the last item of a sequence.
+
+ Note: Does not work with generators. You may want to explicitly
+ convert it to a list:
+
+ .. sourcecode:: jinja
+
+ {{ data | selectattr('name', '==', 'Jinja') | list | last }}
+ """
try:
return next(iter(reversed(seq)))
except StopIteration:
- return environment.undefined('No last item, sequence was empty.')
+ return environment.undefined("No last item, sequence was empty.")
@contextfilter
@@ -453,7 +536,7 @@ def do_random(context, seq):
try:
return random.choice(seq)
except IndexError:
- return context.environment.undefined('No random item, sequence was empty.')
+ return context.environment.undefined("No random item, sequence was empty.")
def do_filesizeformat(value, binary=False):
@@ -465,25 +548,25 @@ def do_filesizeformat(value, binary=False):
bytes = float(value)
base = binary and 1024 or 1000
prefixes = [
- (binary and 'KiB' or 'kB'),
- (binary and 'MiB' or 'MB'),
- (binary and 'GiB' or 'GB'),
- (binary and 'TiB' or 'TB'),
- (binary and 'PiB' or 'PB'),
- (binary and 'EiB' or 'EB'),
- (binary and 'ZiB' or 'ZB'),
- (binary and 'YiB' or 'YB')
+ (binary and "KiB" or "kB"),
+ (binary and "MiB" or "MB"),
+ (binary and "GiB" or "GB"),
+ (binary and "TiB" or "TB"),
+ (binary and "PiB" or "PB"),
+ (binary and "EiB" or "EB"),
+ (binary and "ZiB" or "ZB"),
+ (binary and "YiB" or "YB"),
]
if bytes == 1:
- return '1 Byte'
+ return "1 Byte"
elif bytes < base:
- return '%d Bytes' % bytes
+ return "%d Bytes" % bytes
else:
for i, prefix in enumerate(prefixes):
unit = base ** (i + 2)
if bytes < unit:
- return '%.1f %s' % ((base * bytes / unit), prefix)
- return '%.1f %s' % ((base * bytes / unit), prefix)
+ return "%.1f %s" % ((base * bytes / unit), prefix)
+ return "%.1f %s" % ((base * bytes / unit), prefix)
def do_pprint(value, verbose=False):
@@ -496,8 +579,9 @@ def do_pprint(value, verbose=False):
@evalcontextfilter
-def do_urlize(eval_ctx, value, trim_url_limit=None, nofollow=False,
- target=None, rel=None):
+def do_urlize(
+ eval_ctx, value, trim_url_limit=None, nofollow=False, target=None, rel=None
+):
"""Converts URLs in plain text into clickable links.
If you pass the filter an additional integer it will shorten the urls
@@ -520,22 +604,20 @@ def do_urlize(eval_ctx, value, trim_url_limit=None, nofollow=False,
The *target* parameter was added.
"""
policies = eval_ctx.environment.policies
- rel = set((rel or '').split() or [])
+ rel = set((rel or "").split() or [])
if nofollow:
- rel.add('nofollow')
- rel.update((policies['urlize.rel'] or '').split())
+ rel.add("nofollow")
+ rel.update((policies["urlize.rel"] or "").split())
if target is None:
- target = policies['urlize.target']
- rel = ' '.join(sorted(rel)) or None
+ target = policies["urlize.target"]
+ rel = " ".join(sorted(rel)) or None
rv = urlize(value, trim_url_limit, rel=rel, target=target)
if eval_ctx.autoescape:
rv = Markup(rv)
return rv
-def do_indent(
- s, width=4, first=False, blank=False, indentfirst=None
-):
+def do_indent(s, width=4, first=False, blank=False, indentfirst=None):
"""Return a copy of the string with each line indented by 4 spaces. The
first line and blank lines are not indented by default.
@@ -549,22 +631,31 @@ def do_indent(
Rename the ``indentfirst`` argument to ``first``.
"""
if indentfirst is not None:
- warnings.warn(DeprecationWarning(
- 'The "indentfirst" argument is renamed to "first".'
- ), stacklevel=2)
+ warnings.warn(
+ "The 'indentfirst' argument is renamed to 'first' and will"
+ " be removed in version 3.0.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
first = indentfirst
- s += u'\n' # this quirk is necessary for splitlines method
- indention = u' ' * width
+ indention = u" " * width
+ newline = u"\n"
+
+ if isinstance(s, Markup):
+ indention = Markup(indention)
+ newline = Markup(newline)
+
+ s += newline # this quirk is necessary for splitlines method
if blank:
- rv = (u'\n' + indention).join(s.splitlines())
+ rv = (newline + indention).join(s.splitlines())
else:
lines = s.splitlines()
rv = lines.pop(0)
if lines:
- rv += u'\n' + u'\n'.join(
+ rv += newline + newline.join(
indention + line if line else line for line in lines
)
@@ -575,7 +666,7 @@ def do_indent(
@environmentfilter
-def do_truncate(env, s, length=255, killwords=False, end='...', leeway=None):
+def do_truncate(env, s, length=255, killwords=False, end="...", leeway=None):
"""Return a truncated copy of the string. The length is specified
with the first parameter which defaults to ``255``. If the second
parameter is ``true`` the filter will cut the text at length. Otherwise
@@ -596,46 +687,81 @@ def do_truncate(env, s, length=255, killwords=False, end='...', leeway=None):
{{ "foo bar baz qux"|truncate(11, False, '...', 0) }}
-> "foo bar..."
- The default leeway on newer Jinja2 versions is 5 and was 0 before but
+ The default leeway on newer Jinja versions is 5 and was 0 before but
can be reconfigured globally.
"""
if leeway is None:
- leeway = env.policies['truncate.leeway']
- assert length >= len(end), 'expected length >= %s, got %s' % (len(end), length)
- assert leeway >= 0, 'expected leeway >= 0, got %s' % leeway
+ leeway = env.policies["truncate.leeway"]
+ assert length >= len(end), "expected length >= %s, got %s" % (len(end), length)
+ assert leeway >= 0, "expected leeway >= 0, got %s" % leeway
if len(s) <= length + leeway:
return s
if killwords:
- return s[:length - len(end)] + end
- result = s[:length - len(end)].rsplit(' ', 1)[0]
+ return s[: length - len(end)] + end
+ result = s[: length - len(end)].rsplit(" ", 1)[0]
return result + end
@environmentfilter
-def do_wordwrap(environment, s, width=79, break_long_words=True,
- wrapstring=None):
+def do_wordwrap(
+ environment,
+ s,
+ width=79,
+ break_long_words=True,
+ wrapstring=None,
+ break_on_hyphens=True,
+):
+ """Wrap a string to the given width. Existing newlines are treated
+ as paragraphs to be wrapped separately.
+
+ :param s: Original text to wrap.
+ :param width: Maximum length of wrapped lines.
+ :param break_long_words: If a word is longer than ``width``, break
+ it across lines.
+ :param break_on_hyphens: If a word contains hyphens, it may be split
+ across lines.
+ :param wrapstring: String to join each wrapped line. Defaults to
+ :attr:`Environment.newline_sequence`.
+
+ .. versionchanged:: 2.11
+ Existing newlines are treated as paragraphs wrapped separately.
+
+ .. versionchanged:: 2.11
+ Added the ``break_on_hyphens`` parameter.
+
+ .. versionchanged:: 2.7
+ Added the ``wrapstring`` parameter.
"""
- Return a copy of the string passed to the filter wrapped after
- ``79`` characters. You can override this default using the first
- parameter. If you set the second parameter to `false` Jinja will not
- split words apart if they are longer than `width`. By default, the newlines
- will be the default newlines for the environment, but this can be changed
- using the wrapstring keyword argument.
- .. versionadded:: 2.7
- Added support for the `wrapstring` parameter.
- """
+ import textwrap
+
if not wrapstring:
wrapstring = environment.newline_sequence
- import textwrap
- return wrapstring.join(textwrap.wrap(s, width=width, expand_tabs=False,
- replace_whitespace=False,
- break_long_words=break_long_words))
+
+ # textwrap.wrap doesn't consider existing newlines when wrapping.
+ # If the string has a newline before width, wrap will still insert
+ # a newline at width, resulting in a short line. Instead, split and
+ # wrap each paragraph individually.
+ return wrapstring.join(
+ [
+ wrapstring.join(
+ textwrap.wrap(
+ line,
+ width=width,
+ expand_tabs=False,
+ replace_whitespace=False,
+ break_long_words=break_long_words,
+ break_on_hyphens=break_on_hyphens,
+ )
+ )
+ for line in s.splitlines()
+ ]
+ )
def do_wordcount(s):
"""Count the words in that string."""
- return len(_word_re.findall(s))
+ return len(_word_re.findall(soft_unicode(s)))
def do_int(value, default=0, base=10):
@@ -671,29 +797,40 @@ def do_float(value, default=0.0):
def do_format(value, *args, **kwargs):
- """
- Apply python string formatting on an object:
+ """Apply the given values to a `printf-style`_ format string, like
+ ``string % values``.
.. sourcecode:: jinja
- {{ "%s - %s"|format("Hello?", "Foo!") }}
- -> Hello? - Foo!
+ {{ "%s, %s!"|format(greeting, name) }}
+ Hello, World!
+
+ In most cases it should be more convenient and efficient to use the
+ ``%`` operator or :meth:`str.format`.
+
+ .. code-block:: text
+
+ {{ "%s, %s!" % (greeting, name) }}
+ {{ "{}, {}!".format(greeting, name) }}
+
+ .. _printf-style: https://docs.python.org/library/stdtypes.html
+ #printf-style-string-formatting
"""
if args and kwargs:
- raise FilterArgumentError('can\'t handle positional and keyword '
- 'arguments at the same time')
+ raise FilterArgumentError(
+ "can't handle positional and keyword arguments at the same time"
+ )
return soft_unicode(value) % (kwargs or args)
-def do_trim(value):
- """Strip leading and trailing whitespace."""
- return soft_unicode(value).strip()
+def do_trim(value, chars=None):
+ """Strip leading and trailing characters, by default whitespace."""
+ return soft_unicode(value).strip(chars)
def do_striptags(value):
- """Strip SGML/XML tags and replace adjacent whitespace by one space.
- """
- if hasattr(value, '__html__'):
+ """Strip SGML/XML tags and replace adjacent whitespace by one space."""
+ if hasattr(value, "__html__"):
value = value.__html__()
return Markup(text_type(value)).striptags()
@@ -705,7 +842,7 @@ def do_slice(value, slices, fill_with=None):
.. sourcecode:: html+jinja
- <div class="columwrapper">
+ <div class="columnwrapper">
{%- for column in items|slice(3) %}
<ul class="column-{{ loop.index }}">
{%- for item in column %}
@@ -765,7 +902,7 @@ def do_batch(value, linecount, fill_with=None):
yield tmp
-def do_round(value, precision=0, method='common'):
+def do_round(value, precision=0, method="common"):
"""Round the number to a given precision. The first
parameter specifies the precision (default is ``0``), the
second the rounding method:
@@ -791,9 +928,9 @@ def do_round(value, precision=0, method='common'):
{{ 42.55|round|int }}
-> 43
"""
- if not method in ('common', 'ceil', 'floor'):
- raise FilterArgumentError('method must be common, ceil or floor')
- if method == 'common':
+ if method not in {"common", "ceil", "floor"}:
+ raise FilterArgumentError("method must be common, ceil or floor")
+ if method == "common":
return round(value, precision)
func = getattr(math, method)
return func(value * (10 ** precision)) / (10 ** precision)
@@ -804,52 +941,51 @@ def do_round(value, precision=0, method='common'):
# we do not want to accidentally expose an auto generated repr in case
# people start to print this out in comments or something similar for
# debugging.
-_GroupTuple = namedtuple('_GroupTuple', ['grouper', 'list'])
+_GroupTuple = namedtuple("_GroupTuple", ["grouper", "list"])
_GroupTuple.__repr__ = tuple.__repr__
_GroupTuple.__str__ = tuple.__str__
+
@environmentfilter
def do_groupby(environment, value, attribute):
- """Group a sequence of objects by a common attribute.
+ """Group a sequence of objects by an attribute using Python's
+ :func:`itertools.groupby`. The attribute can use dot notation for
+ nested access, like ``"address.city"``. Unlike Python's ``groupby``,
+ the values are sorted first so only one group is returned for each
+ unique value.
- If you for example have a list of dicts or objects that represent persons
- with `gender`, `first_name` and `last_name` attributes and you want to
- group all users by genders you can do something like the following
- snippet:
+ For example, a list of ``User`` objects with a ``city`` attribute
+ can be rendered in groups. In this example, ``grouper`` refers to
+ the ``city`` value of the group.
.. sourcecode:: html+jinja
- <ul>
- {% for group in persons|groupby('gender') %}
- <li>{{ group.grouper }}<ul>
- {% for person in group.list %}
- <li>{{ person.first_name }} {{ person.last_name }}</li>
- {% endfor %}</ul></li>
- {% endfor %}
- </ul>
+ <ul>{% for city, items in users|groupby("city") %}
+ <li>{{ city }}
+ <ul>{% for user in items %}
+ <li>{{ user.name }}
+ {% endfor %}</ul>
+ </li>
+ {% endfor %}</ul>
- Additionally it's possible to use tuple unpacking for the grouper and
- list:
+ ``groupby`` yields namedtuples of ``(grouper, list)``, which
+ can be used instead of the tuple unpacking above. ``grouper`` is the
+ value of the attribute, and ``list`` is the items with that value.
.. sourcecode:: html+jinja
- <ul>
- {% for grouper, list in persons|groupby('gender') %}
- ...
- {% endfor %}
- </ul>
-
- As you can see the item we're grouping by is stored in the `grouper`
- attribute and the `list` contains all the objects that have this grouper
- in common.
+ <ul>{% for group in users|groupby("city") %}
+ <li>{{ group.grouper }}: {{ group.list|join(", ") }}
+ {% endfor %}</ul>
.. versionchanged:: 2.6
- It's now possible to use dotted notation to group by the child
- attribute of another attribute.
+ The attribute supports dot notation for nested access.
"""
expr = make_attrgetter(environment, attribute)
- return [_GroupTuple(key, list(values)) for key, values
- in groupby(sorted(value, key=expr), expr)]
+ return [
+ _GroupTuple(key, list(values))
+ for key, values in groupby(sorted(value, key=expr), expr)
+ ]
@environmentfilter
@@ -906,7 +1042,7 @@ def do_reverse(value):
rv.reverse()
return rv
except TypeError:
- raise FilterArgumentError('argument must be iterable')
+ raise FilterArgumentError("argument must be iterable")
@environmentfilter
@@ -927,8 +1063,9 @@ def do_attr(environment, obj, name):
except AttributeError:
pass
else:
- if environment.sandboxed and not \
- environment.is_safe_attribute(obj, name, value):
+ if environment.sandboxed and not environment.is_safe_attribute(
+ obj, name, value
+ ):
return environment.unsafe_undefined(obj, name)
return value
return environment.undefined(obj=obj, name=name)
@@ -947,6 +1084,13 @@ def do_map(*args, **kwargs):
Users on this page: {{ users|map(attribute='username')|join(', ') }}
+ You can specify a ``default`` value to use if an object in the list
+ does not have the given attribute.
+
+ .. sourcecode:: jinja
+
+ {{ users|map(attribute="username", default="Anonymous")|join(", ") }}
+
Alternatively you can let it invoke a filter by passing the name of the
filter and the arguments afterwards. A good example would be applying a
text conversion filter on a sequence:
@@ -955,6 +1099,17 @@ def do_map(*args, **kwargs):
Users on this page: {{ titles|map('lower')|join(', ') }}
+ Similar to a generator comprehension such as:
+
+ .. code-block:: python
+
+ (u.username for u in users)
+ (u.username or "Anonymous" for u in users)
+ (do_lower(x) for x in titles)
+
+ .. versionchanged:: 2.11.0
+ Added the ``default`` parameter.
+
.. versionadded:: 2.7
"""
seq, func = prepare_map(args, kwargs)
@@ -980,6 +1135,13 @@ def do_select(*args, **kwargs):
{{ numbers|select("lessthan", 42) }}
{{ strings|select("equalto", "mystring") }}
+ Similar to a generator comprehension such as:
+
+ .. code-block:: python
+
+ (n for n in numbers if test_odd(n))
+ (n for n in numbers if test_divisibleby(n, 3))
+
.. versionadded:: 2.7
"""
return select_or_reject(args, kwargs, lambda x: x, False)
@@ -998,6 +1160,12 @@ def do_reject(*args, **kwargs):
{{ numbers|reject("odd") }}
+ Similar to a generator comprehension such as:
+
+ .. code-block:: python
+
+ (n for n in numbers if not test_odd(n))
+
.. versionadded:: 2.7
"""
return select_or_reject(args, kwargs, lambda x: not x, False)
@@ -1019,6 +1187,13 @@ def do_selectattr(*args, **kwargs):
{{ users|selectattr("is_active") }}
{{ users|selectattr("email", "none") }}
+ Similar to a generator comprehension such as:
+
+ .. code-block:: python
+
+ (user for user in users if user.is_active)
+ (user for user in users if test_none(user.email))
+
.. versionadded:: 2.7
"""
return select_or_reject(args, kwargs, lambda x: x, True)
@@ -1038,6 +1213,13 @@ def do_rejectattr(*args, **kwargs):
{{ users|rejectattr("is_active") }}
{{ users|rejectattr("email", "none") }}
+ Similar to a generator comprehension such as:
+
+ .. code-block:: python
+
+ (user for user in users if not user.is_active)
+ (user for user in users if not test_none(user.email))
+
.. versionadded:: 2.7
"""
return select_or_reject(args, kwargs, lambda x: not x, True)
@@ -1070,32 +1252,38 @@ def do_tojson(eval_ctx, value, indent=None):
.. versionadded:: 2.9
"""
policies = eval_ctx.environment.policies
- dumper = policies['json.dumps_function']
- options = policies['json.dumps_kwargs']
+ dumper = policies["json.dumps_function"]
+ options = policies["json.dumps_kwargs"]
if indent is not None:
options = dict(options)
- options['indent'] = indent
+ options["indent"] = indent
return htmlsafe_json_dumps(value, dumper=dumper, **options)
def prepare_map(args, kwargs):
context = args[0]
seq = args[1]
+ default = None
- if len(args) == 2 and 'attribute' in kwargs:
- attribute = kwargs.pop('attribute')
+ if len(args) == 2 and "attribute" in kwargs:
+ attribute = kwargs.pop("attribute")
+ default = kwargs.pop("default", None)
if kwargs:
- raise FilterArgumentError('Unexpected keyword argument %r' %
- next(iter(kwargs)))
- func = make_attrgetter(context.environment, attribute)
+ raise FilterArgumentError(
+ "Unexpected keyword argument %r" % next(iter(kwargs))
+ )
+ func = make_attrgetter(context.environment, attribute, default=default)
else:
try:
name = args[2]
args = args[3:]
except LookupError:
- raise FilterArgumentError('map requires a filter argument')
- func = lambda item: context.environment.call_filter(
- name, item, args, kwargs, context=context)
+ raise FilterArgumentError("map requires a filter argument")
+
+ def func(item):
+ return context.environment.call_filter(
+ name, item, args, kwargs, context=context
+ )
return seq, func
@@ -1107,18 +1295,22 @@ def prepare_select_or_reject(args, kwargs, modfunc, lookup_attr):
try:
attr = args[2]
except LookupError:
- raise FilterArgumentError('Missing parameter for attribute name')
+ raise FilterArgumentError("Missing parameter for attribute name")
transfunc = make_attrgetter(context.environment, attr)
off = 1
else:
off = 0
- transfunc = lambda x: x
+
+ def transfunc(x):
+ return x
try:
name = args[2 + off]
- args = args[3 + off:]
- func = lambda item: context.environment.call_test(
- name, item, args, kwargs)
+ args = args[3 + off :]
+
+ def func(item):
+ return context.environment.call_test(name, item, args, kwargs)
+
except LookupError:
func = bool
@@ -1134,57 +1326,57 @@ def select_or_reject(args, kwargs, modfunc, lookup_attr):
FILTERS = {
- 'abs': abs,
- 'attr': do_attr,
- 'batch': do_batch,
- 'capitalize': do_capitalize,
- 'center': do_center,
- 'count': len,
- 'd': do_default,
- 'default': do_default,
- 'dictsort': do_dictsort,
- 'e': escape,
- 'escape': escape,
- 'filesizeformat': do_filesizeformat,
- 'first': do_first,
- 'float': do_float,
- 'forceescape': do_forceescape,
- 'format': do_format,
- 'groupby': do_groupby,
- 'indent': do_indent,
- 'int': do_int,
- 'join': do_join,
- 'last': do_last,
- 'length': len,
- 'list': do_list,
- 'lower': do_lower,
- 'map': do_map,
- 'min': do_min,
- 'max': do_max,
- 'pprint': do_pprint,
- 'random': do_random,
- 'reject': do_reject,
- 'rejectattr': do_rejectattr,
- 'replace': do_replace,
- 'reverse': do_reverse,
- 'round': do_round,
- 'safe': do_mark_safe,
- 'select': do_select,
- 'selectattr': do_selectattr,
- 'slice': do_slice,
- 'sort': do_sort,
- 'string': soft_unicode,
- 'striptags': do_striptags,
- 'sum': do_sum,
- 'title': do_title,
- 'trim': do_trim,
- 'truncate': do_truncate,
- 'unique': do_unique,
- 'upper': do_upper,
- 'urlencode': do_urlencode,
- 'urlize': do_urlize,
- 'wordcount': do_wordcount,
- 'wordwrap': do_wordwrap,
- 'xmlattr': do_xmlattr,
- 'tojson': do_tojson,
+ "abs": abs,
+ "attr": do_attr,
+ "batch": do_batch,
+ "capitalize": do_capitalize,
+ "center": do_center,
+ "count": len,
+ "d": do_default,
+ "default": do_default,
+ "dictsort": do_dictsort,
+ "e": escape,
+ "escape": escape,
+ "filesizeformat": do_filesizeformat,
+ "first": do_first,
+ "float": do_float,
+ "forceescape": do_forceescape,
+ "format": do_format,
+ "groupby": do_groupby,
+ "indent": do_indent,
+ "int": do_int,
+ "join": do_join,
+ "last": do_last,
+ "length": len,
+ "list": do_list,
+ "lower": do_lower,
+ "map": do_map,
+ "min": do_min,
+ "max": do_max,
+ "pprint": do_pprint,
+ "random": do_random,
+ "reject": do_reject,
+ "rejectattr": do_rejectattr,
+ "replace": do_replace,
+ "reverse": do_reverse,
+ "round": do_round,
+ "safe": do_mark_safe,
+ "select": do_select,
+ "selectattr": do_selectattr,
+ "slice": do_slice,
+ "sort": do_sort,
+ "string": soft_unicode,
+ "striptags": do_striptags,
+ "sum": do_sum,
+ "title": do_title,
+ "trim": do_trim,
+ "truncate": do_truncate,
+ "unique": do_unique,
+ "upper": do_upper,
+ "urlencode": do_urlencode,
+ "urlize": do_urlize,
+ "wordcount": do_wordcount,
+ "wordwrap": do_wordwrap,
+ "xmlattr": do_xmlattr,
+ "tojson": do_tojson,
}
diff --git a/lib/spack/external/jinja2/idtracking.py b/lib/spack/external/jinja2/idtracking.py
index 491bfe0836..9a0d838017 100644
--- a/lib/spack/external/jinja2/idtracking.py
+++ b/lib/spack/external/jinja2/idtracking.py
@@ -1,11 +1,10 @@
-from jinja2.visitor import NodeVisitor
-from jinja2._compat import iteritems
+from ._compat import iteritems
+from .visitor import NodeVisitor
-
-VAR_LOAD_PARAMETER = 'param'
-VAR_LOAD_RESOLVE = 'resolve'
-VAR_LOAD_ALIAS = 'alias'
-VAR_LOAD_UNDEFINED = 'undefined'
+VAR_LOAD_PARAMETER = "param"
+VAR_LOAD_RESOLVE = "resolve"
+VAR_LOAD_ALIAS = "alias"
+VAR_LOAD_UNDEFINED = "undefined"
def find_symbols(nodes, parent_symbols=None):
@@ -23,7 +22,6 @@ def symbols_for_node(node, parent_symbols=None):
class Symbols(object):
-
def __init__(self, parent=None, level=None):
if level is None:
if parent is None:
@@ -41,7 +39,7 @@ class Symbols(object):
visitor.visit(node, **kwargs)
def _define_ref(self, name, load=None):
- ident = 'l_%d_%s' % (self.level, name)
+ ident = "l_%d_%s" % (self.level, name)
self.refs[name] = ident
if load is not None:
self.loads[ident] = load
@@ -62,8 +60,10 @@ class Symbols(object):
def ref(self, name):
rv = self.find_ref(name)
if rv is None:
- raise AssertionError('Tried to resolve a name to a reference that '
- 'was unknown to the frame (%r)' % name)
+ raise AssertionError(
+ "Tried to resolve a name to a reference that "
+ "was unknown to the frame (%r)" % name
+ )
return rv
def copy(self):
@@ -118,7 +118,7 @@ class Symbols(object):
if branch_count == len(branch_symbols):
continue
target = self.find_ref(name)
- assert target is not None, 'should not happen'
+ assert target is not None, "should not happen"
if self.parent is not None:
outer_target = self.parent.find_ref(name)
@@ -149,7 +149,6 @@ class Symbols(object):
class RootVisitor(NodeVisitor):
-
def __init__(self, symbols):
self.sym_visitor = FrameSymbolVisitor(symbols)
@@ -157,35 +156,39 @@ class RootVisitor(NodeVisitor):
for child in node.iter_child_nodes():
self.sym_visitor.visit(child)
- visit_Template = visit_Block = visit_Macro = visit_FilterBlock = \
- visit_Scope = visit_If = visit_ScopedEvalContextModifier = \
- _simple_visit
+ visit_Template = (
+ visit_Block
+ ) = (
+ visit_Macro
+ ) = (
+ visit_FilterBlock
+ ) = visit_Scope = visit_If = visit_ScopedEvalContextModifier = _simple_visit
def visit_AssignBlock(self, node, **kwargs):
for child in node.body:
self.sym_visitor.visit(child)
def visit_CallBlock(self, node, **kwargs):
- for child in node.iter_child_nodes(exclude=('call',)):
+ for child in node.iter_child_nodes(exclude=("call",)):
self.sym_visitor.visit(child)
def visit_OverlayScope(self, node, **kwargs):
for child in node.body:
self.sym_visitor.visit(child)
- def visit_For(self, node, for_branch='body', **kwargs):
- if for_branch == 'body':
+ def visit_For(self, node, for_branch="body", **kwargs):
+ if for_branch == "body":
self.sym_visitor.visit(node.target, store_as_param=True)
branch = node.body
- elif for_branch == 'else':
+ elif for_branch == "else":
branch = node.else_
- elif for_branch == 'test':
+ elif for_branch == "test":
self.sym_visitor.visit(node.target, store_as_param=True)
if node.test is not None:
self.sym_visitor.visit(node.test)
return
else:
- raise RuntimeError('Unknown for branch')
+ raise RuntimeError("Unknown for branch")
for item in branch or ():
self.sym_visitor.visit(item)
@@ -196,8 +199,9 @@ class RootVisitor(NodeVisitor):
self.sym_visitor.visit(child)
def generic_visit(self, node, *args, **kwargs):
- raise NotImplementedError('Cannot find symbols for %r' %
- node.__class__.__name__)
+ raise NotImplementedError(
+ "Cannot find symbols for %r" % node.__class__.__name__
+ )
class FrameSymbolVisitor(NodeVisitor):
@@ -208,11 +212,11 @@ class FrameSymbolVisitor(NodeVisitor):
def visit_Name(self, node, store_as_param=False, **kwargs):
"""All assignments to names go through this function."""
- if store_as_param or node.ctx == 'param':
+ if store_as_param or node.ctx == "param":
self.symbols.declare_parameter(node.name)
- elif node.ctx == 'store':
+ elif node.ctx == "store":
self.symbols.store(node.name)
- elif node.ctx == 'load':
+ elif node.ctx == "load":
self.symbols.load(node.name)
def visit_NSRef(self, node, **kwargs):
diff --git a/lib/spack/external/jinja2/lexer.py b/lib/spack/external/jinja2/lexer.py
index 6fd135dd5b..552356a12d 100644
--- a/lib/spack/external/jinja2/lexer.py
+++ b/lib/spack/external/jinja2/lexer.py
@@ -1,185 +1,194 @@
# -*- coding: utf-8 -*-
-"""
- jinja2.lexer
- ~~~~~~~~~~~~
-
- This module implements a Jinja / Python combination lexer. The
- `Lexer` class provided by this module is used to do some preprocessing
- for Jinja.
-
- On the one hand it filters out invalid operators like the bitshift
- operators we don't allow in templates. On the other hand it separates
- template code and python code in expressions.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
+"""Implements a Jinja / Python combination lexer. The ``Lexer`` class
+is used to do some preprocessing. It filters out invalid operators like
+the bitshift operators we don't allow in templates. It separates
+template code and python code in expressions.
"""
import re
+from ast import literal_eval
from collections import deque
from operator import itemgetter
-from jinja2._compat import implements_iterator, intern, iteritems, text_type
-from jinja2.exceptions import TemplateSyntaxError
-from jinja2.utils import LRUCache
+from ._compat import implements_iterator
+from ._compat import intern
+from ._compat import iteritems
+from ._compat import text_type
+from .exceptions import TemplateSyntaxError
+from .utils import LRUCache
# cache for the lexers. Exists in order to be able to have multiple
# environments with the same lexer
_lexer_cache = LRUCache(50)
# static regular expressions
-whitespace_re = re.compile(r'\s+', re.U)
-string_re = re.compile(r"('([^'\\]*(?:\\.[^'\\]*)*)'"
- r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S)
-integer_re = re.compile(r'\d+')
+whitespace_re = re.compile(r"\s+", re.U)
+newline_re = re.compile(r"(\r\n|\r|\n)")
+string_re = re.compile(
+ r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S
+)
+integer_re = re.compile(r"(\d+_)*\d+")
+float_re = re.compile(
+ r"""
+ (?<!\.) # doesn't start with a .
+ (\d+_)*\d+ # digits, possibly _ separated
+ (
+ (\.(\d+_)*\d+)? # optional fractional part
+ e[+\-]?(\d+_)*\d+ # exponent part
+ |
+ \.(\d+_)*\d+ # required fractional part
+ )
+ """,
+ re.IGNORECASE | re.VERBOSE,
+)
try:
# check if this Python supports Unicode identifiers
- compile('föö', '<unknown>', 'eval')
+ compile("föö", "<unknown>", "eval")
except SyntaxError:
- # no Unicode support, use ASCII identifiers
- name_re = re.compile(r'[a-zA-Z_][a-zA-Z0-9_]*')
+ # Python 2, no Unicode support, use ASCII identifiers
+ name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*")
check_ident = False
else:
- # Unicode support, build a pattern to match valid characters, and set flag
- # to use str.isidentifier to validate during lexing
- from jinja2 import _identifier
- name_re = re.compile(r'[\w{0}]+'.format(_identifier.pattern))
- check_ident = True
- # remove the pattern from memory after building the regex
- import sys
- del sys.modules['jinja2._identifier']
- import jinja2
- del jinja2._identifier
- del _identifier
+ # Unicode support, import generated re pattern and set flag to use
+ # str.isidentifier to validate during lexing.
+ from ._identifier import pattern as name_re
-float_re = re.compile(r'(?<!\.)\d+\.\d+')
-newline_re = re.compile(r'(\r\n|\r|\n)')
+ check_ident = True
# internal the tokens and keep references to them
-TOKEN_ADD = intern('add')
-TOKEN_ASSIGN = intern('assign')
-TOKEN_COLON = intern('colon')
-TOKEN_COMMA = intern('comma')
-TOKEN_DIV = intern('div')
-TOKEN_DOT = intern('dot')
-TOKEN_EQ = intern('eq')
-TOKEN_FLOORDIV = intern('floordiv')
-TOKEN_GT = intern('gt')
-TOKEN_GTEQ = intern('gteq')
-TOKEN_LBRACE = intern('lbrace')
-TOKEN_LBRACKET = intern('lbracket')
-TOKEN_LPAREN = intern('lparen')
-TOKEN_LT = intern('lt')
-TOKEN_LTEQ = intern('lteq')
-TOKEN_MOD = intern('mod')
-TOKEN_MUL = intern('mul')
-TOKEN_NE = intern('ne')
-TOKEN_PIPE = intern('pipe')
-TOKEN_POW = intern('pow')
-TOKEN_RBRACE = intern('rbrace')
-TOKEN_RBRACKET = intern('rbracket')
-TOKEN_RPAREN = intern('rparen')
-TOKEN_SEMICOLON = intern('semicolon')
-TOKEN_SUB = intern('sub')
-TOKEN_TILDE = intern('tilde')
-TOKEN_WHITESPACE = intern('whitespace')
-TOKEN_FLOAT = intern('float')
-TOKEN_INTEGER = intern('integer')
-TOKEN_NAME = intern('name')
-TOKEN_STRING = intern('string')
-TOKEN_OPERATOR = intern('operator')
-TOKEN_BLOCK_BEGIN = intern('block_begin')
-TOKEN_BLOCK_END = intern('block_end')
-TOKEN_VARIABLE_BEGIN = intern('variable_begin')
-TOKEN_VARIABLE_END = intern('variable_end')
-TOKEN_RAW_BEGIN = intern('raw_begin')
-TOKEN_RAW_END = intern('raw_end')
-TOKEN_COMMENT_BEGIN = intern('comment_begin')
-TOKEN_COMMENT_END = intern('comment_end')
-TOKEN_COMMENT = intern('comment')
-TOKEN_LINESTATEMENT_BEGIN = intern('linestatement_begin')
-TOKEN_LINESTATEMENT_END = intern('linestatement_end')
-TOKEN_LINECOMMENT_BEGIN = intern('linecomment_begin')
-TOKEN_LINECOMMENT_END = intern('linecomment_end')
-TOKEN_LINECOMMENT = intern('linecomment')
-TOKEN_DATA = intern('data')
-TOKEN_INITIAL = intern('initial')
-TOKEN_EOF = intern('eof')
+TOKEN_ADD = intern("add")
+TOKEN_ASSIGN = intern("assign")
+TOKEN_COLON = intern("colon")
+TOKEN_COMMA = intern("comma")
+TOKEN_DIV = intern("div")
+TOKEN_DOT = intern("dot")
+TOKEN_EQ = intern("eq")
+TOKEN_FLOORDIV = intern("floordiv")
+TOKEN_GT = intern("gt")
+TOKEN_GTEQ = intern("gteq")
+TOKEN_LBRACE = intern("lbrace")
+TOKEN_LBRACKET = intern("lbracket")
+TOKEN_LPAREN = intern("lparen")
+TOKEN_LT = intern("lt")
+TOKEN_LTEQ = intern("lteq")
+TOKEN_MOD = intern("mod")
+TOKEN_MUL = intern("mul")
+TOKEN_NE = intern("ne")
+TOKEN_PIPE = intern("pipe")
+TOKEN_POW = intern("pow")
+TOKEN_RBRACE = intern("rbrace")
+TOKEN_RBRACKET = intern("rbracket")
+TOKEN_RPAREN = intern("rparen")
+TOKEN_SEMICOLON = intern("semicolon")
+TOKEN_SUB = intern("sub")
+TOKEN_TILDE = intern("tilde")
+TOKEN_WHITESPACE = intern("whitespace")
+TOKEN_FLOAT = intern("float")
+TOKEN_INTEGER = intern("integer")
+TOKEN_NAME = intern("name")
+TOKEN_STRING = intern("string")
+TOKEN_OPERATOR = intern("operator")
+TOKEN_BLOCK_BEGIN = intern("block_begin")
+TOKEN_BLOCK_END = intern("block_end")
+TOKEN_VARIABLE_BEGIN = intern("variable_begin")
+TOKEN_VARIABLE_END = intern("variable_end")
+TOKEN_RAW_BEGIN = intern("raw_begin")
+TOKEN_RAW_END = intern("raw_end")
+TOKEN_COMMENT_BEGIN = intern("comment_begin")
+TOKEN_COMMENT_END = intern("comment_end")
+TOKEN_COMMENT = intern("comment")
+TOKEN_LINESTATEMENT_BEGIN = intern("linestatement_begin")
+TOKEN_LINESTATEMENT_END = intern("linestatement_end")
+TOKEN_LINECOMMENT_BEGIN = intern("linecomment_begin")
+TOKEN_LINECOMMENT_END = intern("linecomment_end")
+TOKEN_LINECOMMENT = intern("linecomment")
+TOKEN_DATA = intern("data")
+TOKEN_INITIAL = intern("initial")
+TOKEN_EOF = intern("eof")
# bind operators to token types
operators = {
- '+': TOKEN_ADD,
- '-': TOKEN_SUB,
- '/': TOKEN_DIV,
- '//': TOKEN_FLOORDIV,
- '*': TOKEN_MUL,
- '%': TOKEN_MOD,
- '**': TOKEN_POW,
- '~': TOKEN_TILDE,
- '[': TOKEN_LBRACKET,
- ']': TOKEN_RBRACKET,
- '(': TOKEN_LPAREN,
- ')': TOKEN_RPAREN,
- '{': TOKEN_LBRACE,
- '}': TOKEN_RBRACE,
- '==': TOKEN_EQ,
- '!=': TOKEN_NE,
- '>': TOKEN_GT,
- '>=': TOKEN_GTEQ,
- '<': TOKEN_LT,
- '<=': TOKEN_LTEQ,
- '=': TOKEN_ASSIGN,
- '.': TOKEN_DOT,
- ':': TOKEN_COLON,
- '|': TOKEN_PIPE,
- ',': TOKEN_COMMA,
- ';': TOKEN_SEMICOLON
+ "+": TOKEN_ADD,
+ "-": TOKEN_SUB,
+ "/": TOKEN_DIV,
+ "//": TOKEN_FLOORDIV,
+ "*": TOKEN_MUL,
+ "%": TOKEN_MOD,
+ "**": TOKEN_POW,
+ "~": TOKEN_TILDE,
+ "[": TOKEN_LBRACKET,
+ "]": TOKEN_RBRACKET,
+ "(": TOKEN_LPAREN,
+ ")": TOKEN_RPAREN,
+ "{": TOKEN_LBRACE,
+ "}": TOKEN_RBRACE,
+ "==": TOKEN_EQ,
+ "!=": TOKEN_NE,
+ ">": TOKEN_GT,
+ ">=": TOKEN_GTEQ,
+ "<": TOKEN_LT,
+ "<=": TOKEN_LTEQ,
+ "=": TOKEN_ASSIGN,
+ ".": TOKEN_DOT,
+ ":": TOKEN_COLON,
+ "|": TOKEN_PIPE,
+ ",": TOKEN_COMMA,
+ ";": TOKEN_SEMICOLON,
}
reverse_operators = dict([(v, k) for k, v in iteritems(operators)])
-assert len(operators) == len(reverse_operators), 'operators dropped'
-operator_re = re.compile('(%s)' % '|'.join(re.escape(x) for x in
- sorted(operators, key=lambda x: -len(x))))
-
-ignored_tokens = frozenset([TOKEN_COMMENT_BEGIN, TOKEN_COMMENT,
- TOKEN_COMMENT_END, TOKEN_WHITESPACE,
- TOKEN_LINECOMMENT_BEGIN, TOKEN_LINECOMMENT_END,
- TOKEN_LINECOMMENT])
-ignore_if_empty = frozenset([TOKEN_WHITESPACE, TOKEN_DATA,
- TOKEN_COMMENT, TOKEN_LINECOMMENT])
+assert len(operators) == len(reverse_operators), "operators dropped"
+operator_re = re.compile(
+ "(%s)" % "|".join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))
+)
+
+ignored_tokens = frozenset(
+ [
+ TOKEN_COMMENT_BEGIN,
+ TOKEN_COMMENT,
+ TOKEN_COMMENT_END,
+ TOKEN_WHITESPACE,
+ TOKEN_LINECOMMENT_BEGIN,
+ TOKEN_LINECOMMENT_END,
+ TOKEN_LINECOMMENT,
+ ]
+)
+ignore_if_empty = frozenset(
+ [TOKEN_WHITESPACE, TOKEN_DATA, TOKEN_COMMENT, TOKEN_LINECOMMENT]
+)
def _describe_token_type(token_type):
if token_type in reverse_operators:
return reverse_operators[token_type]
return {
- TOKEN_COMMENT_BEGIN: 'begin of comment',
- TOKEN_COMMENT_END: 'end of comment',
- TOKEN_COMMENT: 'comment',
- TOKEN_LINECOMMENT: 'comment',
- TOKEN_BLOCK_BEGIN: 'begin of statement block',
- TOKEN_BLOCK_END: 'end of statement block',
- TOKEN_VARIABLE_BEGIN: 'begin of print statement',
- TOKEN_VARIABLE_END: 'end of print statement',
- TOKEN_LINESTATEMENT_BEGIN: 'begin of line statement',
- TOKEN_LINESTATEMENT_END: 'end of line statement',
- TOKEN_DATA: 'template data / text',
- TOKEN_EOF: 'end of template'
+ TOKEN_COMMENT_BEGIN: "begin of comment",
+ TOKEN_COMMENT_END: "end of comment",
+ TOKEN_COMMENT: "comment",
+ TOKEN_LINECOMMENT: "comment",
+ TOKEN_BLOCK_BEGIN: "begin of statement block",
+ TOKEN_BLOCK_END: "end of statement block",
+ TOKEN_VARIABLE_BEGIN: "begin of print statement",
+ TOKEN_VARIABLE_END: "end of print statement",
+ TOKEN_LINESTATEMENT_BEGIN: "begin of line statement",
+ TOKEN_LINESTATEMENT_END: "end of line statement",
+ TOKEN_DATA: "template data / text",
+ TOKEN_EOF: "end of template",
}.get(token_type, token_type)
def describe_token(token):
"""Returns a description of the token."""
- if token.type == 'name':
+ if token.type == TOKEN_NAME:
return token.value
return _describe_token_type(token.type)
def describe_token_expr(expr):
"""Like `describe_token` but for token expressions."""
- if ':' in expr:
- type, value = expr.split(':', 1)
- if type == 'name':
+ if ":" in expr:
+ type, value = expr.split(":", 1)
+ if type == TOKEN_NAME:
return value
else:
type = expr
@@ -197,21 +206,39 @@ def compile_rules(environment):
"""Compiles all the rules from the environment into a list of rules."""
e = re.escape
rules = [
- (len(environment.comment_start_string), 'comment',
- e(environment.comment_start_string)),
- (len(environment.block_start_string), 'block',
- e(environment.block_start_string)),
- (len(environment.variable_start_string), 'variable',
- e(environment.variable_start_string))
+ (
+ len(environment.comment_start_string),
+ TOKEN_COMMENT_BEGIN,
+ e(environment.comment_start_string),
+ ),
+ (
+ len(environment.block_start_string),
+ TOKEN_BLOCK_BEGIN,
+ e(environment.block_start_string),
+ ),
+ (
+ len(environment.variable_start_string),
+ TOKEN_VARIABLE_BEGIN,
+ e(environment.variable_start_string),
+ ),
]
if environment.line_statement_prefix is not None:
- rules.append((len(environment.line_statement_prefix), 'linestatement',
- r'^[ \t\v]*' + e(environment.line_statement_prefix)))
+ rules.append(
+ (
+ len(environment.line_statement_prefix),
+ TOKEN_LINESTATEMENT_BEGIN,
+ r"^[ \t\v]*" + e(environment.line_statement_prefix),
+ )
+ )
if environment.line_comment_prefix is not None:
- rules.append((len(environment.line_comment_prefix), 'linecomment',
- r'(?:^|(?<=\S))[^\S\r\n]*' +
- e(environment.line_comment_prefix)))
+ rules.append(
+ (
+ len(environment.line_comment_prefix),
+ TOKEN_LINECOMMENT_BEGIN,
+ r"(?:^|(?<=\S))[^\S\r\n]*" + e(environment.line_comment_prefix),
+ )
+ )
return [x[1:] for x in sorted(rules, reverse=True)]
@@ -231,6 +258,7 @@ class Failure(object):
class Token(tuple):
"""Token class."""
+
__slots__ = ()
lineno, type, value = (property(itemgetter(x)) for x in range(3))
@@ -240,7 +268,7 @@ class Token(tuple):
def __str__(self):
if self.type in reverse_operators:
return reverse_operators[self.type]
- elif self.type == 'name':
+ elif self.type == "name":
return self.value
return self.type
@@ -253,8 +281,8 @@ class Token(tuple):
# passed an iterable of not interned strings.
if self.type == expr:
return True
- elif ':' in expr:
- return expr.split(':', 1) == [self.type, self.value]
+ elif ":" in expr:
+ return expr.split(":", 1) == [self.type, self.value]
return False
def test_any(self, *iterable):
@@ -265,11 +293,7 @@ class Token(tuple):
return False
def __repr__(self):
- return 'Token(%r, %r, %r)' % (
- self.lineno,
- self.type,
- self.value
- )
+ return "Token(%r, %r, %r)" % (self.lineno, self.type, self.value)
@implements_iterator
@@ -306,7 +330,7 @@ class TokenStream(object):
self.name = name
self.filename = filename
self.closed = False
- self.current = Token(1, TOKEN_INITIAL, '')
+ self.current = Token(1, TOKEN_INITIAL, "")
next(self)
def __iter__(self):
@@ -314,9 +338,13 @@ class TokenStream(object):
def __bool__(self):
return bool(self._pushed) or self.current.type is not TOKEN_EOF
+
__nonzero__ = __bool__ # py2
- eos = property(lambda x: not x, doc="Are we at the end of the stream?")
+ @property
+ def eos(self):
+ """Are we at the end of the stream?"""
+ return not self
def push(self, token):
"""Push a token back to the stream."""
@@ -332,7 +360,7 @@ class TokenStream(object):
def skip(self, n=1):
"""Got n tokens ahead."""
- for x in range(n):
+ for _ in range(n):
next(self)
def next_if(self, expr):
@@ -363,7 +391,7 @@ class TokenStream(object):
def close(self):
"""Close the stream."""
- self.current = Token(self.current.lineno, TOKEN_EOF, '')
+ self.current = Token(self.current.lineno, TOKEN_EOF, "")
self._iter = None
self.closed = True
@@ -374,14 +402,18 @@ class TokenStream(object):
if not self.current.test(expr):
expr = describe_token_expr(expr)
if self.current.type is TOKEN_EOF:
- raise TemplateSyntaxError('unexpected end of template, '
- 'expected %r.' % expr,
- self.current.lineno,
- self.name, self.filename)
- raise TemplateSyntaxError("expected token %r, got %r" %
- (expr, describe_token(self.current)),
- self.current.lineno,
- self.name, self.filename)
+ raise TemplateSyntaxError(
+ "unexpected end of template, expected %r." % expr,
+ self.current.lineno,
+ self.name,
+ self.filename,
+ )
+ raise TemplateSyntaxError(
+ "expected token %r, got %r" % (expr, describe_token(self.current)),
+ self.current.lineno,
+ self.name,
+ self.filename,
+ )
try:
return self.current
finally:
@@ -390,18 +422,20 @@ class TokenStream(object):
def get_lexer(environment):
"""Return a lexer which is probably cached."""
- key = (environment.block_start_string,
- environment.block_end_string,
- environment.variable_start_string,
- environment.variable_end_string,
- environment.comment_start_string,
- environment.comment_end_string,
- environment.line_statement_prefix,
- environment.line_comment_prefix,
- environment.trim_blocks,
- environment.lstrip_blocks,
- environment.newline_sequence,
- environment.keep_trailing_newline)
+ key = (
+ environment.block_start_string,
+ environment.block_end_string,
+ environment.variable_start_string,
+ environment.variable_end_string,
+ environment.comment_start_string,
+ environment.comment_end_string,
+ environment.line_statement_prefix,
+ environment.line_comment_prefix,
+ environment.trim_blocks,
+ environment.lstrip_blocks,
+ environment.newline_sequence,
+ environment.keep_trailing_newline,
+ )
lexer = _lexer_cache.get(key)
if lexer is None:
lexer = Lexer(environment)
@@ -409,6 +443,19 @@ def get_lexer(environment):
return lexer
+class OptionalLStrip(tuple):
+ """A special tuple for marking a point in the state that can have
+ lstrip applied.
+ """
+
+ __slots__ = ()
+
+ # Even though it looks like a no-op, creating instances fails
+ # without this.
+ def __new__(cls, *members, **kwargs):
+ return super(OptionalLStrip, cls).__new__(cls, members)
+
+
class Lexer(object):
"""Class that implements a lexer for a given environment. Automatically
created by the environment class, usually you don't have to do that.
@@ -419,9 +466,11 @@ class Lexer(object):
def __init__(self, environment):
# shortcuts
- c = lambda x: re.compile(x, re.M | re.S)
e = re.escape
+ def c(x):
+ return re.compile(x, re.M | re.S)
+
# lexing rules for tags
tag_rules = [
(whitespace_re, TOKEN_WHITESPACE, None),
@@ -429,7 +478,7 @@ class Lexer(object):
(integer_re, TOKEN_INTEGER, None),
(name_re, TOKEN_NAME, None),
(string_re, TOKEN_STRING, None),
- (operator_re, TOKEN_OPERATOR, None)
+ (operator_re, TOKEN_OPERATOR, None),
]
# assemble the root lexing rule. because "|" is ungreedy
@@ -441,108 +490,120 @@ class Lexer(object):
root_tag_rules = compile_rules(environment)
# block suffix if trimming is enabled
- block_suffix_re = environment.trim_blocks and '\\n?' or ''
-
- # strip leading spaces if lstrip_blocks is enabled
- prefix_re = {}
- if environment.lstrip_blocks:
- # use '{%+' to manually disable lstrip_blocks behavior
- no_lstrip_re = e('+')
- # detect overlap between block and variable or comment strings
- block_diff = c(r'^%s(.*)' % e(environment.block_start_string))
- # make sure we don't mistake a block for a variable or a comment
- m = block_diff.match(environment.comment_start_string)
- no_lstrip_re += m and r'|%s' % e(m.group(1)) or ''
- m = block_diff.match(environment.variable_start_string)
- no_lstrip_re += m and r'|%s' % e(m.group(1)) or ''
-
- # detect overlap between comment and variable strings
- comment_diff = c(r'^%s(.*)' % e(environment.comment_start_string))
- m = comment_diff.match(environment.variable_start_string)
- no_variable_re = m and r'(?!%s)' % e(m.group(1)) or ''
-
- lstrip_re = r'^[ \t]*'
- block_prefix_re = r'%s%s(?!%s)|%s\+?' % (
- lstrip_re,
- e(environment.block_start_string),
- no_lstrip_re,
- e(environment.block_start_string),
- )
- comment_prefix_re = r'%s%s%s|%s\+?' % (
- lstrip_re,
- e(environment.comment_start_string),
- no_variable_re,
- e(environment.comment_start_string),
- )
- prefix_re['block'] = block_prefix_re
- prefix_re['comment'] = comment_prefix_re
- else:
- block_prefix_re = '%s' % e(environment.block_start_string)
+ block_suffix_re = environment.trim_blocks and "\\n?" or ""
+
+ # If lstrip is enabled, it should not be applied if there is any
+ # non-whitespace between the newline and block.
+ self.lstrip_unless_re = c(r"[^ \t]") if environment.lstrip_blocks else None
self.newline_sequence = environment.newline_sequence
self.keep_trailing_newline = environment.keep_trailing_newline
# global lexing rules
self.rules = {
- 'root': [
+ "root": [
# directives
- (c('(.*?)(?:%s)' % '|'.join(
- [r'(?P<raw_begin>(?:\s*%s\-|%s)\s*raw\s*(?:\-%s\s*|%s))' % (
- e(environment.block_start_string),
- block_prefix_re,
- e(environment.block_end_string),
- e(environment.block_end_string)
- )] + [
- r'(?P<%s_begin>\s*%s\-|%s)' % (n, r, prefix_re.get(n,r))
- for n, r in root_tag_rules
- ])), (TOKEN_DATA, '#bygroup'), '#bygroup'),
+ (
+ c(
+ "(.*?)(?:%s)"
+ % "|".join(
+ [
+ r"(?P<raw_begin>%s(\-|\+|)\s*raw\s*(?:\-%s\s*|%s))"
+ % (
+ e(environment.block_start_string),
+ e(environment.block_end_string),
+ e(environment.block_end_string),
+ )
+ ]
+ + [
+ r"(?P<%s>%s(\-|\+|))" % (n, r)
+ for n, r in root_tag_rules
+ ]
+ )
+ ),
+ OptionalLStrip(TOKEN_DATA, "#bygroup"),
+ "#bygroup",
+ ),
# data
- (c('.+'), TOKEN_DATA, None)
+ (c(".+"), TOKEN_DATA, None),
],
# comments
TOKEN_COMMENT_BEGIN: [
- (c(r'(.*?)((?:\-%s\s*|%s)%s)' % (
- e(environment.comment_end_string),
- e(environment.comment_end_string),
- block_suffix_re
- )), (TOKEN_COMMENT, TOKEN_COMMENT_END), '#pop'),
- (c('(.)'), (Failure('Missing end of comment tag'),), None)
+ (
+ c(
+ r"(.*?)((?:\-%s\s*|%s)%s)"
+ % (
+ e(environment.comment_end_string),
+ e(environment.comment_end_string),
+ block_suffix_re,
+ )
+ ),
+ (TOKEN_COMMENT, TOKEN_COMMENT_END),
+ "#pop",
+ ),
+ (c("(.)"), (Failure("Missing end of comment tag"),), None),
],
# blocks
TOKEN_BLOCK_BEGIN: [
- (c(r'(?:\-%s\s*|%s)%s' % (
- e(environment.block_end_string),
- e(environment.block_end_string),
- block_suffix_re
- )), TOKEN_BLOCK_END, '#pop'),
- ] + tag_rules,
+ (
+ c(
+ r"(?:\-%s\s*|%s)%s"
+ % (
+ e(environment.block_end_string),
+ e(environment.block_end_string),
+ block_suffix_re,
+ )
+ ),
+ TOKEN_BLOCK_END,
+ "#pop",
+ ),
+ ]
+ + tag_rules,
# variables
TOKEN_VARIABLE_BEGIN: [
- (c(r'\-%s\s*|%s' % (
- e(environment.variable_end_string),
- e(environment.variable_end_string)
- )), TOKEN_VARIABLE_END, '#pop')
- ] + tag_rules,
+ (
+ c(
+ r"\-%s\s*|%s"
+ % (
+ e(environment.variable_end_string),
+ e(environment.variable_end_string),
+ )
+ ),
+ TOKEN_VARIABLE_END,
+ "#pop",
+ )
+ ]
+ + tag_rules,
# raw block
TOKEN_RAW_BEGIN: [
- (c(r'(.*?)((?:\s*%s\-|%s)\s*endraw\s*(?:\-%s\s*|%s%s))' % (
- e(environment.block_start_string),
- block_prefix_re,
- e(environment.block_end_string),
- e(environment.block_end_string),
- block_suffix_re
- )), (TOKEN_DATA, TOKEN_RAW_END), '#pop'),
- (c('(.)'), (Failure('Missing end of raw directive'),), None)
+ (
+ c(
+ r"(.*?)((?:%s(\-|\+|))\s*endraw\s*(?:\-%s\s*|%s%s))"
+ % (
+ e(environment.block_start_string),
+ e(environment.block_end_string),
+ e(environment.block_end_string),
+ block_suffix_re,
+ )
+ ),
+ OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END),
+ "#pop",
+ ),
+ (c("(.)"), (Failure("Missing end of raw directive"),), None),
],
# line statements
TOKEN_LINESTATEMENT_BEGIN: [
- (c(r'\s*(\n|$)'), TOKEN_LINESTATEMENT_END, '#pop')
- ] + tag_rules,
+ (c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop")
+ ]
+ + tag_rules,
# line comments
TOKEN_LINECOMMENT_BEGIN: [
- (c(r'(.*?)()(?=\n|$)'), (TOKEN_LINECOMMENT,
- TOKEN_LINECOMMENT_END), '#pop')
- ]
+ (
+ c(r"(.*?)()(?=\n|$)"),
+ (TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END),
+ "#pop",
+ )
+ ],
}
def _normalize_newlines(self, value):
@@ -550,8 +611,7 @@ class Lexer(object):
return newline_re.sub(self.newline_sequence, value)
def tokenize(self, source, name=None, filename=None, state=None):
- """Calls tokeniter + tokenize and wraps it in a token stream.
- """
+ """Calls tokeniter + tokenize and wraps it in a token stream."""
stream = self.tokeniter(source, name, filename, state)
return TokenStream(self.wrap(stream, name, filename), name, filename)
@@ -562,37 +622,40 @@ class Lexer(object):
for lineno, token, value in stream:
if token in ignored_tokens:
continue
- elif token == 'linestatement_begin':
- token = 'block_begin'
- elif token == 'linestatement_end':
- token = 'block_end'
+ elif token == TOKEN_LINESTATEMENT_BEGIN:
+ token = TOKEN_BLOCK_BEGIN
+ elif token == TOKEN_LINESTATEMENT_END:
+ token = TOKEN_BLOCK_END
# we are not interested in those tokens in the parser
- elif token in ('raw_begin', 'raw_end'):
+ elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END):
continue
- elif token == 'data':
+ elif token == TOKEN_DATA:
value = self._normalize_newlines(value)
- elif token == 'keyword':
+ elif token == "keyword":
token = value
- elif token == 'name':
+ elif token == TOKEN_NAME:
value = str(value)
if check_ident and not value.isidentifier():
raise TemplateSyntaxError(
- 'Invalid character in identifier',
- lineno, name, filename)
- elif token == 'string':
+ "Invalid character in identifier", lineno, name, filename
+ )
+ elif token == TOKEN_STRING:
# try to unescape string
try:
- value = self._normalize_newlines(value[1:-1]) \
- .encode('ascii', 'backslashreplace') \
- .decode('unicode-escape')
+ value = (
+ self._normalize_newlines(value[1:-1])
+ .encode("ascii", "backslashreplace")
+ .decode("unicode-escape")
+ )
except Exception as e:
- msg = str(e).split(':')[-1].strip()
+ msg = str(e).split(":")[-1].strip()
raise TemplateSyntaxError(msg, lineno, name, filename)
- elif token == 'integer':
- value = int(value)
- elif token == 'float':
- value = float(value)
- elif token == 'operator':
+ elif token == TOKEN_INTEGER:
+ value = int(value.replace("_", ""))
+ elif token == TOKEN_FLOAT:
+ # remove all "_" first to support more Python versions
+ value = literal_eval(value.replace("_", ""))
+ elif token == TOKEN_OPERATOR:
token = operators[value]
yield Token(lineno, token, value)
@@ -603,23 +666,23 @@ class Lexer(object):
source = text_type(source)
lines = source.splitlines()
if self.keep_trailing_newline and source:
- for newline in ('\r\n', '\r', '\n'):
+ for newline in ("\r\n", "\r", "\n"):
if source.endswith(newline):
- lines.append('')
+ lines.append("")
break
- source = '\n'.join(lines)
+ source = "\n".join(lines)
pos = 0
lineno = 1
- stack = ['root']
- if state is not None and state != 'root':
- assert state in ('variable', 'block'), 'invalid state'
- stack.append(state + '_begin')
- else:
- state = 'root'
+ stack = ["root"]
+ if state is not None and state != "root":
+ assert state in ("variable", "block"), "invalid state"
+ stack.append(state + "_begin")
statetokens = self.rules[stack[-1]]
source_length = len(source)
-
balancing_stack = []
+ lstrip_unless_re = self.lstrip_unless_re
+ newlines_stripped = 0
+ line_starting = True
while 1:
# tokenizer loop
@@ -633,13 +696,48 @@ class Lexer(object):
# are balanced. continue parsing with the lower rule which
# is the operator rule. do this only if the end tags look
# like operators
- if balancing_stack and \
- tokens in ('variable_end', 'block_end',
- 'linestatement_end'):
+ if balancing_stack and tokens in (
+ TOKEN_VARIABLE_END,
+ TOKEN_BLOCK_END,
+ TOKEN_LINESTATEMENT_END,
+ ):
continue
# tuples support more options
if isinstance(tokens, tuple):
+ groups = m.groups()
+
+ if isinstance(tokens, OptionalLStrip):
+ # Rule supports lstrip. Match will look like
+ # text, block type, whitespace control, type, control, ...
+ text = groups[0]
+
+ # Skipping the text and first type, every other group is the
+ # whitespace control for each type. One of the groups will be
+ # -, +, or empty string instead of None.
+ strip_sign = next(g for g in groups[2::2] if g is not None)
+
+ if strip_sign == "-":
+ # Strip all whitespace between the text and the tag.
+ stripped = text.rstrip()
+ newlines_stripped = text[len(stripped) :].count("\n")
+ groups = (stripped,) + groups[1:]
+ elif (
+ # Not marked for preserving whitespace.
+ strip_sign != "+"
+ # lstrip is enabled.
+ and lstrip_unless_re is not None
+ # Not a variable expression.
+ and not m.groupdict().get(TOKEN_VARIABLE_BEGIN)
+ ):
+ # The start of text between the last newline and the tag.
+ l_pos = text.rfind("\n") + 1
+ if l_pos > 0 or line_starting:
+ # If there's only whitespace between the newline and the
+ # tag, strip it.
+ if not lstrip_unless_re.search(text, l_pos):
+ groups = (text[:l_pos],) + groups[1:]
+
for idx, token in enumerate(tokens):
# failure group
if token.__class__ is Failure:
@@ -647,51 +745,57 @@ class Lexer(object):
# bygroup is a bit more complex, in that case we
# yield for the current token the first named
# group that matched
- elif token == '#bygroup':
+ elif token == "#bygroup":
for key, value in iteritems(m.groupdict()):
if value is not None:
yield lineno, key, value
- lineno += value.count('\n')
+ lineno += value.count("\n")
break
else:
- raise RuntimeError('%r wanted to resolve '
- 'the token dynamically'
- ' but no group matched'
- % regex)
+ raise RuntimeError(
+ "%r wanted to resolve "
+ "the token dynamically"
+ " but no group matched" % regex
+ )
# normal group
else:
- data = m.group(idx + 1)
+ data = groups[idx]
if data or token not in ignore_if_empty:
yield lineno, token, data
- lineno += data.count('\n')
+ lineno += data.count("\n") + newlines_stripped
+ newlines_stripped = 0
# strings as token just are yielded as it.
else:
data = m.group()
# update brace/parentheses balance
- if tokens == 'operator':
- if data == '{':
- balancing_stack.append('}')
- elif data == '(':
- balancing_stack.append(')')
- elif data == '[':
- balancing_stack.append(']')
- elif data in ('}', ')', ']'):
+ if tokens == TOKEN_OPERATOR:
+ if data == "{":
+ balancing_stack.append("}")
+ elif data == "(":
+ balancing_stack.append(")")
+ elif data == "[":
+ balancing_stack.append("]")
+ elif data in ("}", ")", "]"):
if not balancing_stack:
- raise TemplateSyntaxError('unexpected \'%s\'' %
- data, lineno, name,
- filename)
+ raise TemplateSyntaxError(
+ "unexpected '%s'" % data, lineno, name, filename
+ )
expected_op = balancing_stack.pop()
if expected_op != data:
- raise TemplateSyntaxError('unexpected \'%s\', '
- 'expected \'%s\'' %
- (data, expected_op),
- lineno, name,
- filename)
+ raise TemplateSyntaxError(
+ "unexpected '%s', "
+ "expected '%s'" % (data, expected_op),
+ lineno,
+ name,
+ filename,
+ )
# yield items
if data or tokens not in ignore_if_empty:
yield lineno, tokens, data
- lineno += data.count('\n')
+ lineno += data.count("\n")
+
+ line_starting = m.group()[-1:] == "\n"
# fetch new position into new variable so that we can check
# if there is a internal parsing error which would result
@@ -701,19 +805,20 @@ class Lexer(object):
# handle state changes
if new_state is not None:
# remove the uppermost state
- if new_state == '#pop':
+ if new_state == "#pop":
stack.pop()
# resolve the new state by group checking
- elif new_state == '#bygroup':
+ elif new_state == "#bygroup":
for key, value in iteritems(m.groupdict()):
if value is not None:
stack.append(key)
break
else:
- raise RuntimeError('%r wanted to resolve the '
- 'new state dynamically but'
- ' no group matched' %
- regex)
+ raise RuntimeError(
+ "%r wanted to resolve the "
+ "new state dynamically but"
+ " no group matched" % regex
+ )
# direct state name given
else:
stack.append(new_state)
@@ -722,8 +827,9 @@ class Lexer(object):
# this means a loop without break condition, avoid that and
# raise error
elif pos2 == pos:
- raise RuntimeError('%r yielded empty string without '
- 'stack change' % regex)
+ raise RuntimeError(
+ "%r yielded empty string without stack change" % regex
+ )
# publish new function and start again
pos = pos2
break
@@ -734,6 +840,9 @@ class Lexer(object):
if pos >= source_length:
return
# something went wrong
- raise TemplateSyntaxError('unexpected char %r at %d' %
- (source[pos], pos), lineno,
- name, filename)
+ raise TemplateSyntaxError(
+ "unexpected char %r at %d" % (source[pos], pos),
+ lineno,
+ name,
+ filename,
+ )
diff --git a/lib/spack/external/jinja2/loaders.py b/lib/spack/external/jinja2/loaders.py
index 4c79793760..457c4b59a7 100644
--- a/lib/spack/external/jinja2/loaders.py
+++ b/lib/spack/external/jinja2/loaders.py
@@ -1,22 +1,21 @@
# -*- coding: utf-8 -*-
-"""
- jinja2.loaders
- ~~~~~~~~~~~~~~
-
- Jinja loader classes.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
+"""API and implementations for loading templates from different data
+sources.
"""
import os
import sys
import weakref
-from types import ModuleType
-from os import path
from hashlib import sha1
-from jinja2.exceptions import TemplateNotFound
-from jinja2.utils import open_if_exists, internalcode
-from jinja2._compat import string_types, iteritems
+from os import path
+from types import ModuleType
+
+from ._compat import abc
+from ._compat import fspath
+from ._compat import iteritems
+from ._compat import string_types
+from .exceptions import TemplateNotFound
+from .utils import internalcode
+from .utils import open_if_exists
def split_template_path(template):
@@ -24,12 +23,14 @@ def split_template_path(template):
'..' in the path it will raise a `TemplateNotFound` error.
"""
pieces = []
- for piece in template.split('/'):
- if path.sep in piece \
- or (path.altsep and path.altsep in piece) or \
- piece == path.pardir:
+ for piece in template.split("/"):
+ if (
+ path.sep in piece
+ or (path.altsep and path.altsep in piece)
+ or piece == path.pardir
+ ):
raise TemplateNotFound(template)
- elif piece and piece != '.':
+ elif piece and piece != ".":
pieces.append(piece)
return pieces
@@ -86,15 +87,16 @@ class BaseLoader(object):
the template will be reloaded.
"""
if not self.has_source_access:
- raise RuntimeError('%s cannot provide access to the source' %
- self.__class__.__name__)
+ raise RuntimeError(
+ "%s cannot provide access to the source" % self.__class__.__name__
+ )
raise TemplateNotFound(template)
def list_templates(self):
"""Iterates over all templates. If the loader does not support that
it should raise a :exc:`TypeError` which is the default behavior.
"""
- raise TypeError('this loader cannot iterate over all templates')
+ raise TypeError("this loader cannot iterate over all templates")
@internalcode
def load(self, environment, name, globals=None):
@@ -131,8 +133,9 @@ class BaseLoader(object):
bucket.code = code
bcc.set_bucket(bucket)
- return environment.template_class.from_code(environment, code,
- globals, uptodate)
+ return environment.template_class.from_code(
+ environment, code, globals, uptodate
+ )
class FileSystemLoader(BaseLoader):
@@ -153,14 +156,20 @@ class FileSystemLoader(BaseLoader):
>>> loader = FileSystemLoader('/path/to/templates', followlinks=True)
- .. versionchanged:: 2.8+
- The *followlinks* parameter was added.
+ .. versionchanged:: 2.8
+ The ``followlinks`` parameter was added.
"""
- def __init__(self, searchpath, encoding='utf-8', followlinks=False):
- if isinstance(searchpath, string_types):
+ def __init__(self, searchpath, encoding="utf-8", followlinks=False):
+ if not isinstance(searchpath, abc.Iterable) or isinstance(
+ searchpath, string_types
+ ):
searchpath = [searchpath]
- self.searchpath = list(searchpath)
+
+ # In Python 3.5, os.path.join doesn't support Path. This can be
+ # simplified to list(searchpath) when Python 3.5 is dropped.
+ self.searchpath = [fspath(p) for p in searchpath]
+
self.encoding = encoding
self.followlinks = followlinks
@@ -183,6 +192,7 @@ class FileSystemLoader(BaseLoader):
return path.getmtime(filename) == mtime
except OSError:
return False
+
return contents, filename, uptodate
raise TemplateNotFound(template)
@@ -190,12 +200,14 @@ class FileSystemLoader(BaseLoader):
found = set()
for searchpath in self.searchpath:
walk_dir = os.walk(searchpath, followlinks=self.followlinks)
- for dirpath, dirnames, filenames in walk_dir:
+ for dirpath, _, filenames in walk_dir:
for filename in filenames:
- template = os.path.join(dirpath, filename) \
- [len(searchpath):].strip(os.path.sep) \
- .replace(os.path.sep, '/')
- if template[:2] == './':
+ template = (
+ os.path.join(dirpath, filename)[len(searchpath) :]
+ .strip(os.path.sep)
+ .replace(os.path.sep, "/")
+ )
+ if template[:2] == "./":
template = template[2:]
if template not in found:
found.add(template)
@@ -217,10 +229,11 @@ class PackageLoader(BaseLoader):
from the file system and not a zip file.
"""
- def __init__(self, package_name, package_path='templates',
- encoding='utf-8'):
- from pkg_resources import DefaultProvider, ResourceManager, \
- get_provider
+ def __init__(self, package_name, package_path="templates", encoding="utf-8"):
+ from pkg_resources import DefaultProvider
+ from pkg_resources import get_provider
+ from pkg_resources import ResourceManager
+
provider = get_provider(package_name)
self.encoding = encoding
self.manager = ResourceManager()
@@ -230,14 +243,17 @@ class PackageLoader(BaseLoader):
def get_source(self, environment, template):
pieces = split_template_path(template)
- p = '/'.join((self.package_path,) + tuple(pieces))
+ p = "/".join((self.package_path,) + tuple(pieces))
+
if not self.provider.has_resource(p):
raise TemplateNotFound(template)
filename = uptodate = None
+
if self.filesystem_bound:
filename = self.provider.get_resource_filename(self.manager, p)
mtime = path.getmtime(filename)
+
def uptodate():
try:
return path.getmtime(filename) == mtime
@@ -249,19 +265,24 @@ class PackageLoader(BaseLoader):
def list_templates(self):
path = self.package_path
- if path[:2] == './':
+
+ if path[:2] == "./":
path = path[2:]
- elif path == '.':
- path = ''
+ elif path == ".":
+ path = ""
+
offset = len(path)
results = []
+
def _walk(path):
for filename in self.provider.resource_listdir(path):
- fullname = path + '/' + filename
+ fullname = path + "/" + filename
+
if self.provider.resource_isdir(fullname):
_walk(fullname)
else:
- results.append(fullname[offset:].lstrip('/'))
+ results.append(fullname[offset:].lstrip("/"))
+
_walk(path)
results.sort()
return results
@@ -334,7 +355,7 @@ class PrefixLoader(BaseLoader):
by loading ``'app2/index.html'`` the file from the second.
"""
- def __init__(self, mapping, delimiter='/'):
+ def __init__(self, mapping, delimiter="/"):
self.mapping = mapping
self.delimiter = delimiter
@@ -434,19 +455,20 @@ class ModuleLoader(BaseLoader):
has_source_access = False
def __init__(self, path):
- package_name = '_jinja2_module_templates_%x' % id(self)
+ package_name = "_jinja2_module_templates_%x" % id(self)
# create a fake module that looks for the templates in the
# path given.
mod = _TemplateModule(package_name)
- if isinstance(path, string_types):
+
+ if not isinstance(path, abc.Iterable) or isinstance(path, string_types):
path = [path]
- else:
- path = list(path)
- mod.__path__ = path
- sys.modules[package_name] = weakref.proxy(mod,
- lambda x: sys.modules.pop(package_name, None))
+ mod.__path__ = [fspath(p) for p in path]
+
+ sys.modules[package_name] = weakref.proxy(
+ mod, lambda x: sys.modules.pop(package_name, None)
+ )
# the only strong reference, the sys.modules entry is weak
# so that the garbage collector can remove it once the
@@ -456,20 +478,20 @@ class ModuleLoader(BaseLoader):
@staticmethod
def get_template_key(name):
- return 'tmpl_' + sha1(name.encode('utf-8')).hexdigest()
+ return "tmpl_" + sha1(name.encode("utf-8")).hexdigest()
@staticmethod
def get_module_filename(name):
- return ModuleLoader.get_template_key(name) + '.py'
+ return ModuleLoader.get_template_key(name) + ".py"
@internalcode
def load(self, environment, name, globals=None):
key = self.get_template_key(name)
- module = '%s.%s' % (self.package_name, key)
+ module = "%s.%s" % (self.package_name, key)
mod = getattr(self.module, module, None)
if mod is None:
try:
- mod = __import__(module, None, None, ['root'])
+ mod = __import__(module, None, None, ["root"])
except ImportError:
raise TemplateNotFound(name)
@@ -478,4 +500,5 @@ class ModuleLoader(BaseLoader):
sys.modules.pop(module, None)
return environment.template_class.from_module_dict(
- environment, mod.__dict__, globals)
+ environment, mod.__dict__, globals
+ )
diff --git a/lib/spack/external/jinja2/meta.py b/lib/spack/external/jinja2/meta.py
index 7421914f77..3795aace59 100644
--- a/lib/spack/external/jinja2/meta.py
+++ b/lib/spack/external/jinja2/meta.py
@@ -1,25 +1,18 @@
# -*- coding: utf-8 -*-
+"""Functions that expose information about templates that might be
+interesting for introspection.
"""
- jinja2.meta
- ~~~~~~~~~~~
-
- This module implements various functions that exposes information about
- templates that might be interesting for various kinds of applications.
-
- :copyright: (c) 2017 by the Jinja Team, see AUTHORS for more details.
- :license: BSD, see LICENSE for more details.
-"""
-from jinja2 import nodes
-from jinja2.compiler import CodeGenerator
-from jinja2._compat import string_types, iteritems
+from . import nodes
+from ._compat import iteritems
+from ._compat import string_types
+from .compiler import CodeGenerator
class TrackingCodeGenerator(CodeGenerator):
"""We abuse the code generator for introspection."""
def __init__(self, environment):
- CodeGenerator.__init__(self, environment, '<introspection>',
- '<introspection>')
+ CodeGenerator.__init__(self, environment, "<introspection>", "<introspection>")
self.undeclared_identifiers = set()
def write(self, x):
@@ -29,7 +22,7 @@ class TrackingCodeGenerator(CodeGenerator):
"""Remember all undeclared identifiers."""
CodeGenerator.enter_frame(self, frame)
for _, (action, param) in iteritems(frame.symbols.loads):
- if action == 'resolve':
+ if action == "resolve" and param not in self.environment.globals:
self.undeclared_identifiers.add(param)
@@ -72,8 +65,9 @@ def find_referenced_templates(ast):
This function is useful for dependency tracking. For example if you want
to rebuild parts of the website after a layout template has changed.
"""
- for node in ast.find_all((nodes.Extends, nodes.FromImport, nodes.Import,
- nodes.Include)):
+ for node in ast.find_all(
+ (nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include)
+ ):
if not isinstance(node.template, nodes.Const):
# a tuple with some non consts in there
if isinstance(node.template, (nodes.Tuple, nodes.List)):
@@ -96,8 +90,9 @@ def find_referenced_templates(ast):
# a tuple or list (latter *should* not happen) made of consts,
# yield the consts that are strings. We could warn here for
# non string values
- elif isinstance(node, nodes.Include) and \
- isinstance(node.template.value, (tuple, list)):
+ elif isinstance(node, nodes.Include) and isinstance(
+ node.template.value, (tuple, list)
+ ):
for template_name in node.template.value:
if isinstance(template_name, string_types):
yield template_name
diff --git a/lib/spack/external/jinja2/nativetypes.py b/lib/spack/external/jinja2/nativetypes.py
index fe17e4138d..a9ead4e2bb 100644
--- a/lib/spack/external/jinja2/nativetypes.py
+++ b/lib/spack/external/jinja2/nativetypes.py
@@ -1,19 +1,23 @@
-import sys
from ast import literal_eval
-from itertools import islice, chain
-from jinja2 import nodes
-from jinja2._compat import text_type
-from jinja2.compiler import CodeGenerator, has_safe_repr
-from jinja2.environment import Environment, Template
-from jinja2.utils import concat, escape
+from itertools import chain
+from itertools import islice
+
+from . import nodes
+from ._compat import text_type
+from .compiler import CodeGenerator
+from .compiler import has_safe_repr
+from .environment import Environment
+from .environment import Template
def native_concat(nodes):
- """Return a native Python type from the list of compiled nodes. If the
- result is a single node, its value is returned. Otherwise, the nodes are
- concatenated as strings. If the result can be parsed with
- :func:`ast.literal_eval`, the parsed value is returned. Otherwise, the
- string is returned.
+ """Return a native Python type from the list of compiled nodes. If
+ the result is a single node, its value is returned. Otherwise, the
+ nodes are concatenated as strings. If the result can be parsed with
+ :func:`ast.literal_eval`, the parsed value is returned. Otherwise,
+ the string is returned.
+
+ :param nodes: Iterable of nodes to concatenate.
"""
head = list(islice(nodes, 2))
@@ -21,200 +25,70 @@ def native_concat(nodes):
return None
if len(head) == 1:
- out = head[0]
+ raw = head[0]
else:
- out = u''.join([text_type(v) for v in chain(head, nodes)])
+ raw = u"".join([text_type(v) for v in chain(head, nodes)])
try:
- return literal_eval(out)
+ return literal_eval(raw)
except (ValueError, SyntaxError, MemoryError):
- return out
+ return raw
class NativeCodeGenerator(CodeGenerator):
- """A code generator which avoids injecting ``to_string()`` calls around the
- internal code Jinja uses to render templates.
+ """A code generator which renders Python types by not adding
+ ``to_string()`` around output nodes.
"""
- def visit_Output(self, node, frame):
- """Same as :meth:`CodeGenerator.visit_Output`, but do not call
- ``to_string`` on output nodes in generated code.
- """
- if self.has_known_extends and frame.require_output_check:
- return
-
- finalize = self.environment.finalize
- finalize_context = getattr(finalize, 'contextfunction', False)
- finalize_eval = getattr(finalize, 'evalcontextfunction', False)
- finalize_env = getattr(finalize, 'environmentfunction', False)
-
- if finalize is not None:
- if finalize_context or finalize_eval:
- const_finalize = None
- elif finalize_env:
- def const_finalize(x):
- return finalize(self.environment, x)
- else:
- const_finalize = finalize
- else:
- def const_finalize(x):
- return x
-
- # If we are inside a frame that requires output checking, we do so.
- outdent_later = False
-
- if frame.require_output_check:
- self.writeline('if parent_template is None:')
- self.indent()
- outdent_later = True
-
- # Try to evaluate as many chunks as possible into a static string at
- # compile time.
- body = []
-
- for child in node.nodes:
- try:
- if const_finalize is None:
- raise nodes.Impossible()
-
- const = child.as_const(frame.eval_ctx)
- if not has_safe_repr(const):
- raise nodes.Impossible()
- except nodes.Impossible:
- body.append(child)
- continue
-
- # the frame can't be volatile here, because otherwise the as_const
- # function would raise an Impossible exception at that point
- try:
- if frame.eval_ctx.autoescape:
- if hasattr(const, '__html__'):
- const = const.__html__()
- else:
- const = escape(const)
-
- const = const_finalize(const)
- except Exception:
- # if something goes wrong here we evaluate the node at runtime
- # for easier debugging
- body.append(child)
- continue
-
- if body and isinstance(body[-1], list):
- body[-1].append(const)
- else:
- body.append([const])
-
- # if we have less than 3 nodes or a buffer we yield or extend/append
- if len(body) < 3 or frame.buffer is not None:
- if frame.buffer is not None:
- # for one item we append, for more we extend
- if len(body) == 1:
- self.writeline('%s.append(' % frame.buffer)
- else:
- self.writeline('%s.extend((' % frame.buffer)
-
- self.indent()
-
- for item in body:
- if isinstance(item, list):
- val = repr(native_concat(item))
-
- if frame.buffer is None:
- self.writeline('yield ' + val)
- else:
- self.writeline(val + ',')
- else:
- if frame.buffer is None:
- self.writeline('yield ', item)
- else:
- self.newline(item)
-
- close = 0
-
- if finalize is not None:
- self.write('environment.finalize(')
-
- if finalize_context:
- self.write('context, ')
-
- close += 1
-
- self.visit(item, frame)
-
- if close > 0:
- self.write(')' * close)
-
- if frame.buffer is not None:
- self.write(',')
-
- if frame.buffer is not None:
- # close the open parentheses
- self.outdent()
- self.writeline(len(body) == 1 and ')' or '))')
-
- # otherwise we create a format string as this is faster in that case
- else:
- format = []
- arguments = []
-
- for item in body:
- if isinstance(item, list):
- format.append(native_concat(item).replace('%', '%%'))
- else:
- format.append('%s')
- arguments.append(item)
-
- self.writeline('yield ')
- self.write(repr(concat(format)) + ' % (')
- self.indent()
-
- for argument in arguments:
- self.newline(argument)
- close = 0
-
- if finalize is not None:
- self.write('environment.finalize(')
-
- if finalize_context:
- self.write('context, ')
- elif finalize_eval:
- self.write('context.eval_ctx, ')
- elif finalize_env:
- self.write('environment, ')
-
- close += 1
-
- self.visit(argument, frame)
- self.write(')' * close + ', ')
-
- self.outdent()
- self.writeline(')')
+ @staticmethod
+ def _default_finalize(value):
+ return value
+
+ def _output_const_repr(self, group):
+ return repr(u"".join([text_type(v) for v in group]))
+
+ def _output_child_to_const(self, node, frame, finalize):
+ const = node.as_const(frame.eval_ctx)
+
+ if not has_safe_repr(const):
+ raise nodes.Impossible()
- if outdent_later:
- self.outdent()
+ if isinstance(node, nodes.TemplateData):
+ return const
+
+ return finalize.const(const)
+
+ def _output_child_pre(self, node, frame, finalize):
+ if finalize.src is not None:
+ self.write(finalize.src)
+
+ def _output_child_post(self, node, frame, finalize):
+ if finalize.src is not None:
+ self.write(")")
+
+
+class NativeEnvironment(Environment):
+ """An environment that renders templates to native Python types."""
+
+ code_generator_class = NativeCodeGenerator
class NativeTemplate(Template):
+ environment_class = NativeEnvironment
+
def render(self, *args, **kwargs):
- """Render the template to produce a native Python type. If the result
- is a single node, its value is returned. Otherwise, the nodes are
- concatenated as strings. If the result can be parsed with
- :func:`ast.literal_eval`, the parsed value is returned. Otherwise, the
- string is returned.
+ """Render the template to produce a native Python type. If the
+ result is a single node, its value is returned. Otherwise, the
+ nodes are concatenated as strings. If the result can be parsed
+ with :func:`ast.literal_eval`, the parsed value is returned.
+ Otherwise, the string is returned.
"""
vars = dict(*args, **kwargs)
try:
return native_concat(self.root_render_func(self.new_context(vars)))
except Exception:
- exc_info = sys.exc_info()
+ return self.environment.handle_exception()
- return self.environment.handle_exception(exc_info, True)
-
-class NativeEnvironment(Environment):
- """An environment that renders templates to native Python types."""
-
- code_generator_class = NativeCodeGenerator
- template_class = NativeTemplate
+NativeEnvironment.template_class = NativeTemplate
diff --git a/lib/spack/external/jinja2/nodes.py b/lib/spack/external/jinja2/nodes.py
index 4d9a01ad8b..95bd614a14 100644
--- a/lib/spack/external/jinja2/nodes.py
+++ b/lib/spack/external/jinja2/nodes.py
@@ -1,54 +1,39 @@
# -*- coding: utf-8 -*-
+"""AST nodes generated by the parser for the compiler. Also provides
+some node tree helper functions used by the parser and compiler in order
+to normalize nodes.
"""
- jinja2.nodes
- ~~~~~~~~~~~~
-
- This module implements additional nodes derived from the ast base node.
-
- It also provides some node tree helper functions like `in_lineno` and
- `get_nodes` used by the parser and translator in order to normalize
- python and jinja nodes.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
-"""
-import types
import operator
-
from collections import deque
-from jinja2.utils import Markup
-from jinja2._compat import izip, with_metaclass, text_type, PY2
-
-#: the types we support for context functions
-_context_function_types = (types.FunctionType, types.MethodType)
+from markupsafe import Markup
+from ._compat import izip
+from ._compat import PY2
+from ._compat import text_type
+from ._compat import with_metaclass
_binop_to_func = {
- '*': operator.mul,
- '/': operator.truediv,
- '//': operator.floordiv,
- '**': operator.pow,
- '%': operator.mod,
- '+': operator.add,
- '-': operator.sub
+ "*": operator.mul,
+ "/": operator.truediv,
+ "//": operator.floordiv,
+ "**": operator.pow,
+ "%": operator.mod,
+ "+": operator.add,
+ "-": operator.sub,
}
-_uaop_to_func = {
- 'not': operator.not_,
- '+': operator.pos,
- '-': operator.neg
-}
+_uaop_to_func = {"not": operator.not_, "+": operator.pos, "-": operator.neg}
_cmpop_to_func = {
- 'eq': operator.eq,
- 'ne': operator.ne,
- 'gt': operator.gt,
- 'gteq': operator.ge,
- 'lt': operator.lt,
- 'lteq': operator.le,
- 'in': lambda a, b: a in b,
- 'notin': lambda a, b: a not in b
+ "eq": operator.eq,
+ "ne": operator.ne,
+ "gt": operator.gt,
+ "gteq": operator.ge,
+ "lt": operator.lt,
+ "lteq": operator.le,
+ "in": lambda a, b: a in b,
+ "notin": lambda a, b: a not in b,
}
@@ -61,16 +46,16 @@ class NodeType(type):
inheritance. fields and attributes from the parent class are
automatically forwarded to the child."""
- def __new__(cls, name, bases, d):
- for attr in 'fields', 'attributes':
+ def __new__(mcs, name, bases, d):
+ for attr in "fields", "attributes":
storage = []
storage.extend(getattr(bases[0], attr, ()))
storage.extend(d.get(attr, ()))
- assert len(bases) == 1, 'multiple inheritance not allowed'
- assert len(storage) == len(set(storage)), 'layout conflict'
+ assert len(bases) == 1, "multiple inheritance not allowed"
+ assert len(storage) == len(set(storage)), "layout conflict"
d[attr] = tuple(storage)
- d.setdefault('abstract', False)
- return type.__new__(cls, name, bases, d)
+ d.setdefault("abstract", False)
+ return type.__new__(mcs, name, bases, d)
class EvalContext(object):
@@ -97,15 +82,17 @@ class EvalContext(object):
def get_eval_context(node, ctx):
if ctx is None:
if node.environment is None:
- raise RuntimeError('if no eval context is passed, the '
- 'node must have an attached '
- 'environment.')
+ raise RuntimeError(
+ "if no eval context is passed, the "
+ "node must have an attached "
+ "environment."
+ )
return EvalContext(node.environment)
return ctx
class Node(with_metaclass(NodeType, object)):
- """Baseclass for all Jinja2 nodes. There are a number of nodes available
+ """Baseclass for all Jinja nodes. There are a number of nodes available
of different types. There are four major types:
- :class:`Stmt`: statements
@@ -120,30 +107,32 @@ class Node(with_metaclass(NodeType, object)):
The `environment` attribute is set at the end of the parsing process for
all nodes automatically.
"""
+
fields = ()
- attributes = ('lineno', 'environment')
+ attributes = ("lineno", "environment")
abstract = True
def __init__(self, *fields, **attributes):
if self.abstract:
- raise TypeError('abstract nodes are not instanciable')
+ raise TypeError("abstract nodes are not instantiable")
if fields:
if len(fields) != len(self.fields):
if not self.fields:
- raise TypeError('%r takes 0 arguments' %
- self.__class__.__name__)
- raise TypeError('%r takes 0 or %d argument%s' % (
- self.__class__.__name__,
- len(self.fields),
- len(self.fields) != 1 and 's' or ''
- ))
+ raise TypeError("%r takes 0 arguments" % self.__class__.__name__)
+ raise TypeError(
+ "%r takes 0 or %d argument%s"
+ % (
+ self.__class__.__name__,
+ len(self.fields),
+ len(self.fields) != 1 and "s" or "",
+ )
+ )
for name, arg in izip(self.fields, fields):
setattr(self, name, arg)
for attr in self.attributes:
setattr(self, attr, attributes.pop(attr, None))
if attributes:
- raise TypeError('unknown attribute %r' %
- next(iter(attributes)))
+ raise TypeError("unknown attribute %r" % next(iter(attributes)))
def iter_fields(self, exclude=None, only=None):
"""This method iterates over all fields that are defined and yields
@@ -153,9 +142,11 @@ class Node(with_metaclass(NodeType, object)):
should be sets or tuples of field names.
"""
for name in self.fields:
- if (exclude is only is None) or \
- (exclude is not None and name not in exclude) or \
- (only is not None and name in only):
+ if (
+ (exclude is only is None)
+ or (exclude is not None and name not in exclude)
+ or (only is not None and name in only)
+ ):
try:
yield name, getattr(self, name)
except AttributeError:
@@ -166,7 +157,7 @@ class Node(with_metaclass(NodeType, object)):
over all fields and yields the values of they are nodes. If the value
of a field is a list all the nodes in that list are returned.
"""
- for field, item in self.iter_fields(exclude, only):
+ for _, item in self.iter_fields(exclude, only):
if isinstance(item, list):
for n in item:
if isinstance(n, Node):
@@ -200,7 +191,7 @@ class Node(with_metaclass(NodeType, object)):
todo = deque([self])
while todo:
node = todo.popleft()
- if 'ctx' in node.fields:
+ if "ctx" in node.fields:
node.ctx = ctx
todo.extend(node.iter_child_nodes())
return self
@@ -210,7 +201,7 @@ class Node(with_metaclass(NodeType, object)):
todo = deque([self])
while todo:
node = todo.popleft()
- if 'lineno' in node.attributes:
+ if "lineno" in node.attributes:
if node.lineno is None or override:
node.lineno = lineno
todo.extend(node.iter_child_nodes())
@@ -226,8 +217,9 @@ class Node(with_metaclass(NodeType, object)):
return self
def __eq__(self, other):
- return type(self) is type(other) and \
- tuple(self.iter_fields()) == tuple(other.iter_fields())
+ return type(self) is type(other) and tuple(self.iter_fields()) == tuple(
+ other.iter_fields()
+ )
def __ne__(self, other):
return not self.__eq__(other)
@@ -236,10 +228,9 @@ class Node(with_metaclass(NodeType, object)):
__hash__ = object.__hash__
def __repr__(self):
- return '%s(%s)' % (
+ return "%s(%s)" % (
self.__class__.__name__,
- ', '.join('%s=%r' % (arg, getattr(self, arg, None)) for
- arg in self.fields)
+ ", ".join("%s=%r" % (arg, getattr(self, arg, None)) for arg in self.fields),
)
def dump(self):
@@ -248,37 +239,39 @@ class Node(with_metaclass(NodeType, object)):
buf.append(repr(node))
return
- buf.append('nodes.%s(' % node.__class__.__name__)
+ buf.append("nodes.%s(" % node.__class__.__name__)
if not node.fields:
- buf.append(')')
+ buf.append(")")
return
for idx, field in enumerate(node.fields):
if idx:
- buf.append(', ')
+ buf.append(", ")
value = getattr(node, field)
if isinstance(value, list):
- buf.append('[')
+ buf.append("[")
for idx, item in enumerate(value):
if idx:
- buf.append(', ')
+ buf.append(", ")
_dump(item)
- buf.append(']')
+ buf.append("]")
else:
_dump(value)
- buf.append(')')
+ buf.append(")")
+
buf = []
_dump(self)
- return ''.join(buf)
-
+ return "".join(buf)
class Stmt(Node):
"""Base node for all statements."""
+
abstract = True
class Helper(Node):
"""Nodes that exist in a specific context only."""
+
abstract = True
@@ -286,19 +279,22 @@ class Template(Node):
"""Node that represents a template. This must be the outermost node that
is passed to the compiler.
"""
- fields = ('body',)
+
+ fields = ("body",)
class Output(Stmt):
"""A node that holds multiple expressions which are then printed out.
This is used both for the `print` statement and the regular template data.
"""
- fields = ('nodes',)
+
+ fields = ("nodes",)
class Extends(Stmt):
"""Represents an extends statement."""
- fields = ('template',)
+
+ fields = ("template",)
class For(Stmt):
@@ -309,12 +305,14 @@ class For(Stmt):
For filtered nodes an expression can be stored as `test`, otherwise `None`.
"""
- fields = ('target', 'iter', 'body', 'else_', 'test', 'recursive')
+
+ fields = ("target", "iter", "body", "else_", "test", "recursive")
class If(Stmt):
"""If `test` is true, `body` is rendered, else `else_`."""
- fields = ('test', 'body', 'elif_', 'else_')
+
+ fields = ("test", "body", "elif_", "else_")
class Macro(Stmt):
@@ -322,19 +320,22 @@ class Macro(Stmt):
arguments and `defaults` a list of defaults if there are any. `body` is
a list of nodes for the macro body.
"""
- fields = ('name', 'args', 'defaults', 'body')
+
+ fields = ("name", "args", "defaults", "body")
class CallBlock(Stmt):
"""Like a macro without a name but a call instead. `call` is called with
the unnamed macro as `caller` argument this node holds.
"""
- fields = ('call', 'args', 'defaults', 'body')
+
+ fields = ("call", "args", "defaults", "body")
class FilterBlock(Stmt):
"""Node for filter sections."""
- fields = ('body', 'filter')
+
+ fields = ("body", "filter")
class With(Stmt):
@@ -343,22 +344,26 @@ class With(Stmt):
.. versionadded:: 2.9.3
"""
- fields = ('targets', 'values', 'body')
+
+ fields = ("targets", "values", "body")
class Block(Stmt):
"""A node that represents a block."""
- fields = ('name', 'body', 'scoped')
+
+ fields = ("name", "body", "scoped")
class Include(Stmt):
"""A node that represents the include tag."""
- fields = ('template', 'with_context', 'ignore_missing')
+
+ fields = ("template", "with_context", "ignore_missing")
class Import(Stmt):
"""A node that represents the import tag."""
- fields = ('template', 'target', 'with_context')
+
+ fields = ("template", "target", "with_context")
class FromImport(Stmt):
@@ -372,26 +377,31 @@ class FromImport(Stmt):
The list of names may contain tuples if aliases are wanted.
"""
- fields = ('template', 'names', 'with_context')
+
+ fields = ("template", "names", "with_context")
class ExprStmt(Stmt):
"""A statement that evaluates an expression and discards the result."""
- fields = ('node',)
+
+ fields = ("node",)
class Assign(Stmt):
"""Assigns an expression to a target."""
- fields = ('target', 'node')
+
+ fields = ("target", "node")
class AssignBlock(Stmt):
"""Assigns a block to a target."""
- fields = ('target', 'filter', 'body')
+
+ fields = ("target", "filter", "body")
class Expr(Node):
"""Baseclass for all expressions."""
+
abstract = True
def as_const(self, eval_ctx=None):
@@ -414,15 +424,18 @@ class Expr(Node):
class BinExpr(Expr):
"""Baseclass for all binary expressions."""
- fields = ('left', 'right')
+
+ fields = ("left", "right")
operator = None
abstract = True
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
# intercepted operators cannot be folded at compile time
- if self.environment.sandboxed and \
- self.operator in self.environment.intercepted_binops:
+ if (
+ self.environment.sandboxed
+ and self.operator in self.environment.intercepted_binops
+ ):
raise Impossible()
f = _binop_to_func[self.operator]
try:
@@ -433,15 +446,18 @@ class BinExpr(Expr):
class UnaryExpr(Expr):
"""Baseclass for all unary expressions."""
- fields = ('node',)
+
+ fields = ("node",)
operator = None
abstract = True
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
# intercepted operators cannot be folded at compile time
- if self.environment.sandboxed and \
- self.operator in self.environment.intercepted_unops:
+ if (
+ self.environment.sandboxed
+ and self.operator in self.environment.intercepted_unops
+ ):
raise Impossible()
f = _uaop_to_func[self.operator]
try:
@@ -458,16 +474,17 @@ class Name(Expr):
- `load`: load that name
- `param`: like `store` but if the name was defined as function parameter.
"""
- fields = ('name', 'ctx')
+
+ fields = ("name", "ctx")
def can_assign(self):
- return self.name not in ('true', 'false', 'none',
- 'True', 'False', 'None')
+ return self.name not in ("true", "false", "none", "True", "False", "None")
class NSRef(Expr):
"""Reference to a namespace value assignment"""
- fields = ('name', 'attr')
+
+ fields = ("name", "attr")
def can_assign(self):
# We don't need any special checks here; NSRef assignments have a
@@ -479,6 +496,7 @@ class NSRef(Expr):
class Literal(Expr):
"""Baseclass for literals."""
+
abstract = True
@@ -488,14 +506,18 @@ class Const(Literal):
complex values such as lists too. Only constants with a safe
representation (objects where ``eval(repr(x)) == x`` is true).
"""
- fields = ('value',)
+
+ fields = ("value",)
def as_const(self, eval_ctx=None):
rv = self.value
- if PY2 and type(rv) is text_type and \
- self.environment.policies['compiler.ascii_str']:
+ if (
+ PY2
+ and type(rv) is text_type
+ and self.environment.policies["compiler.ascii_str"]
+ ):
try:
- rv = rv.encode('ascii')
+ rv = rv.encode("ascii")
except UnicodeError:
pass
return rv
@@ -507,6 +529,7 @@ class Const(Literal):
an `Impossible` exception.
"""
from .compiler import has_safe_repr
+
if not has_safe_repr(value):
raise Impossible()
return cls(value, lineno=lineno, environment=environment)
@@ -514,7 +537,8 @@ class Const(Literal):
class TemplateData(Literal):
"""A constant template string."""
- fields = ('data',)
+
+ fields = ("data",)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
@@ -530,7 +554,8 @@ class Tuple(Literal):
for subscripts. Like for :class:`Name` `ctx` specifies if the tuple
is used for loading the names or storing.
"""
- fields = ('items', 'ctx')
+
+ fields = ("items", "ctx")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
@@ -545,7 +570,8 @@ class Tuple(Literal):
class List(Literal):
"""Any list literal such as ``[1, 2, 3]``"""
- fields = ('items',)
+
+ fields = ("items",)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
@@ -556,7 +582,8 @@ class Dict(Literal):
"""Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
:class:`Pair` nodes.
"""
- fields = ('items',)
+
+ fields = ("items",)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
@@ -565,7 +592,8 @@ class Dict(Literal):
class Pair(Helper):
"""A key, value pair for dicts."""
- fields = ('key', 'value')
+
+ fields = ("key", "value")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
@@ -574,7 +602,8 @@ class Pair(Helper):
class Keyword(Helper):
"""A key, value pair for keyword arguments where key is a string."""
- fields = ('key', 'value')
+
+ fields = ("key", "value")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
@@ -585,7 +614,8 @@ class CondExpr(Expr):
"""A conditional expression (inline if expression). (``{{
foo if bar else baz }}``)
"""
- fields = ('test', 'expr1', 'expr2')
+
+ fields = ("test", "expr1", "expr2")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
@@ -626,7 +656,7 @@ class Filter(Expr):
filtered. Buffers are created by macros and filter blocks.
"""
- fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
+ fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
@@ -636,28 +666,27 @@ class Filter(Expr):
# we have to be careful here because we call filter_ below.
# if this variable would be called filter, 2to3 would wrap the
- # call in a list beause it is assuming we are talking about the
+ # call in a list because it is assuming we are talking about the
# builtin filter function here which no longer returns a list in
# python 3. because of that, do not rename filter_ to filter!
filter_ = self.environment.filters.get(self.name)
- if filter_ is None or getattr(filter_, 'contextfilter', False):
+ if filter_ is None or getattr(filter_, "contextfilter", False) is True:
raise Impossible()
# We cannot constant handle async filters, so we need to make sure
# to not go down this path.
- if (
- eval_ctx.environment.is_async
- and getattr(filter_, 'asyncfiltervariant', False)
+ if eval_ctx.environment.is_async and getattr(
+ filter_, "asyncfiltervariant", False
):
raise Impossible()
args, kwargs = args_as_const(self, eval_ctx)
args.insert(0, self.node.as_const(eval_ctx))
- if getattr(filter_, 'evalcontextfilter', False):
+ if getattr(filter_, "evalcontextfilter", False) is True:
args.insert(0, eval_ctx)
- elif getattr(filter_, 'environmentfilter', False):
+ elif getattr(filter_, "environmentfilter", False) is True:
args.insert(0, self.environment)
try:
@@ -671,7 +700,7 @@ class Test(Expr):
rest of the fields are the same as for :class:`Call`.
"""
- fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
+ fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
def as_const(self, eval_ctx=None):
test = self.environment.tests.get(self.name)
@@ -696,20 +725,23 @@ class Call(Expr):
node for dynamic positional (``*args``) or keyword (``**kwargs``)
arguments.
"""
- fields = ('node', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
+
+ fields = ("node", "args", "kwargs", "dyn_args", "dyn_kwargs")
class Getitem(Expr):
"""Get an attribute or item from an expression and prefer the item."""
- fields = ('node', 'arg', 'ctx')
+
+ fields = ("node", "arg", "ctx")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
- if self.ctx != 'load':
+ if self.ctx != "load":
raise Impossible()
try:
- return self.environment.getitem(self.node.as_const(eval_ctx),
- self.arg.as_const(eval_ctx))
+ return self.environment.getitem(
+ self.node.as_const(eval_ctx), self.arg.as_const(eval_ctx)
+ )
except Exception:
raise Impossible()
@@ -721,15 +753,15 @@ class Getattr(Expr):
"""Get an attribute or item from an expression that is a ascii-only
bytestring and prefer the attribute.
"""
- fields = ('node', 'attr', 'ctx')
+
+ fields = ("node", "attr", "ctx")
def as_const(self, eval_ctx=None):
- if self.ctx != 'load':
+ if self.ctx != "load":
raise Impossible()
try:
eval_ctx = get_eval_context(self, eval_ctx)
- return self.environment.getattr(self.node.as_const(eval_ctx),
- self.attr)
+ return self.environment.getattr(self.node.as_const(eval_ctx), self.attr)
except Exception:
raise Impossible()
@@ -741,14 +773,17 @@ class Slice(Expr):
"""Represents a slice object. This must only be used as argument for
:class:`Subscript`.
"""
- fields = ('start', 'stop', 'step')
+
+ fields = ("start", "stop", "step")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
+
def const(obj):
if obj is None:
return None
return obj.as_const(eval_ctx)
+
return slice(const(self.start), const(self.stop), const(self.step))
@@ -756,82 +791,103 @@ class Concat(Expr):
"""Concatenates the list of expressions provided after converting them to
unicode.
"""
- fields = ('nodes',)
+
+ fields = ("nodes",)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
- return ''.join(text_type(x.as_const(eval_ctx)) for x in self.nodes)
+ return "".join(text_type(x.as_const(eval_ctx)) for x in self.nodes)
class Compare(Expr):
"""Compares an expression with some other expressions. `ops` must be a
list of :class:`Operand`\\s.
"""
- fields = ('expr', 'ops')
+
+ fields = ("expr", "ops")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
result = value = self.expr.as_const(eval_ctx)
+
try:
for op in self.ops:
new_value = op.expr.as_const(eval_ctx)
result = _cmpop_to_func[op.op](value, new_value)
+
+ if not result:
+ return False
+
value = new_value
except Exception:
raise Impossible()
+
return result
class Operand(Helper):
"""Holds an operator and an expression."""
- fields = ('op', 'expr')
+
+ fields = ("op", "expr")
+
if __debug__:
- Operand.__doc__ += '\nThe following operators are available: ' + \
- ', '.join(sorted('``%s``' % x for x in set(_binop_to_func) |
- set(_uaop_to_func) | set(_cmpop_to_func)))
+ Operand.__doc__ += "\nThe following operators are available: " + ", ".join(
+ sorted(
+ "``%s``" % x
+ for x in set(_binop_to_func) | set(_uaop_to_func) | set(_cmpop_to_func)
+ )
+ )
class Mul(BinExpr):
"""Multiplies the left with the right node."""
- operator = '*'
+
+ operator = "*"
class Div(BinExpr):
"""Divides the left by the right node."""
- operator = '/'
+
+ operator = "/"
class FloorDiv(BinExpr):
"""Divides the left by the right node and truncates conver the
result into an integer by truncating.
"""
- operator = '//'
+
+ operator = "//"
class Add(BinExpr):
"""Add the left to the right node."""
- operator = '+'
+
+ operator = "+"
class Sub(BinExpr):
"""Subtract the right from the left node."""
- operator = '-'
+
+ operator = "-"
class Mod(BinExpr):
"""Left modulo right."""
- operator = '%'
+
+ operator = "%"
class Pow(BinExpr):
"""Left to the power of right."""
- operator = '**'
+
+ operator = "**"
class And(BinExpr):
"""Short circuited AND."""
- operator = 'and'
+
+ operator = "and"
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
@@ -840,7 +896,8 @@ class And(BinExpr):
class Or(BinExpr):
"""Short circuited OR."""
- operator = 'or'
+
+ operator = "or"
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
@@ -849,17 +906,20 @@ class Or(BinExpr):
class Not(UnaryExpr):
"""Negate the expression."""
- operator = 'not'
+
+ operator = "not"
class Neg(UnaryExpr):
"""Make the expression negative."""
- operator = '-'
+
+ operator = "-"
class Pos(UnaryExpr):
"""Make the expression positive (noop for most expressions)"""
- operator = '+'
+
+ operator = "+"
# Helpers for extensions
@@ -869,7 +929,8 @@ class EnvironmentAttribute(Expr):
"""Loads an attribute from the environment object. This is useful for
extensions that want to call a callback stored on the environment.
"""
- fields = ('name',)
+
+ fields = ("name",)
class ExtensionAttribute(Expr):
@@ -879,7 +940,8 @@ class ExtensionAttribute(Expr):
This node is usually constructed by calling the
:meth:`~jinja2.ext.Extension.attr` method on an extension.
"""
- fields = ('identifier', 'name')
+
+ fields = ("identifier", "name")
class ImportedName(Expr):
@@ -888,7 +950,8 @@ class ImportedName(Expr):
function from the cgi module on evaluation. Imports are optimized by the
compiler so there is no need to assign them to local variables.
"""
- fields = ('importname',)
+
+ fields = ("importname",)
class InternalName(Expr):
@@ -898,16 +961,20 @@ class InternalName(Expr):
a new identifier for you. This identifier is not available from the
template and is not threated specially by the compiler.
"""
- fields = ('name',)
+
+ fields = ("name",)
def __init__(self):
- raise TypeError('Can\'t create internal names. Use the '
- '`free_identifier` method on a parser.')
+ raise TypeError(
+ "Can't create internal names. Use the "
+ "`free_identifier` method on a parser."
+ )
class MarkSafe(Expr):
"""Mark the wrapped expression as safe (wrap it as `Markup`)."""
- fields = ('expr',)
+
+ fields = ("expr",)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
@@ -920,7 +987,8 @@ class MarkSafeIfAutoescape(Expr):
.. versionadded:: 2.5
"""
- fields = ('expr',)
+
+ fields = ("expr",)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
@@ -942,6 +1010,20 @@ class ContextReference(Expr):
Assign(Name('foo', ctx='store'),
Getattr(ContextReference(), 'name'))
+
+ This is basically equivalent to using the
+ :func:`~jinja2.contextfunction` decorator when using the
+ high-level API, which causes a reference to the context to be passed
+ as the first argument to a function.
+ """
+
+
+class DerivedContextReference(Expr):
+ """Return the current template context including locals. Behaves
+ exactly like :class:`ContextReference`, but includes local
+ variables, such as from a ``for`` loop.
+
+ .. versionadded:: 2.11
"""
@@ -955,7 +1037,8 @@ class Break(Stmt):
class Scope(Stmt):
"""An artificial scope."""
- fields = ('body',)
+
+ fields = ("body",)
class OverlayScope(Stmt):
@@ -971,7 +1054,8 @@ class OverlayScope(Stmt):
.. versionadded:: 2.10
"""
- fields = ('context', 'body')
+
+ fields = ("context", "body")
class EvalContextModifier(Stmt):
@@ -982,7 +1066,8 @@ class EvalContextModifier(Stmt):
EvalContextModifier(options=[Keyword('autoescape', Const(True))])
"""
- fields = ('options',)
+
+ fields = ("options",)
class ScopedEvalContextModifier(EvalContextModifier):
@@ -990,10 +1075,14 @@ class ScopedEvalContextModifier(EvalContextModifier):
:class:`EvalContextModifier` but will only modify the
:class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
"""
- fields = ('body',)
+
+ fields = ("body",)
# make sure nobody creates custom nodes
def _failing_new(*args, **kwargs):
- raise TypeError('can\'t create custom node types')
-NodeType.__new__ = staticmethod(_failing_new); del _failing_new
+ raise TypeError("can't create custom node types")
+
+
+NodeType.__new__ = staticmethod(_failing_new)
+del _failing_new
diff --git a/lib/spack/external/jinja2/optimizer.py b/lib/spack/external/jinja2/optimizer.py
index 65ab3ceb71..7bc78c4524 100644
--- a/lib/spack/external/jinja2/optimizer.py
+++ b/lib/spack/external/jinja2/optimizer.py
@@ -1,23 +1,15 @@
# -*- coding: utf-8 -*-
+"""The optimizer tries to constant fold expressions and modify the AST
+in place so that it should be faster to evaluate.
+
+Because the AST does not contain all the scoping information and the
+compiler has to find that out, we cannot do all the optimizations we
+want. For example, loop unrolling doesn't work because unrolled loops
+would have a different scope. The solution would be a second syntax tree
+that stored the scoping rules.
"""
- jinja2.optimizer
- ~~~~~~~~~~~~~~~~
-
- The jinja optimizer is currently trying to constant fold a few expressions
- and modify the AST in place so that it should be easier to evaluate it.
-
- Because the AST does not contain all the scoping information and the
- compiler has to find that out, we cannot do all the optimizations we
- want. For example loop unrolling doesn't work because unrolled loops would
- have a different scoping.
-
- The solution would be a second syntax tree that has the scoping rules stored.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD.
-"""
-from jinja2 import nodes
-from jinja2.visitor import NodeTransformer
+from . import nodes
+from .visitor import NodeTransformer
def optimize(node, environment):
@@ -28,22 +20,22 @@ def optimize(node, environment):
class Optimizer(NodeTransformer):
-
def __init__(self, environment):
self.environment = environment
- def fold(self, node, eval_ctx=None):
- """Do constant folding."""
- node = self.generic_visit(node)
- try:
- return nodes.Const.from_untrusted(node.as_const(eval_ctx),
- lineno=node.lineno,
- environment=self.environment)
- except nodes.Impossible:
- return node
-
- visit_Add = visit_Sub = visit_Mul = visit_Div = visit_FloorDiv = \
- visit_Pow = visit_Mod = visit_And = visit_Or = visit_Pos = visit_Neg = \
- visit_Not = visit_Compare = visit_Getitem = visit_Getattr = visit_Call = \
- visit_Filter = visit_Test = visit_CondExpr = fold
- del fold
+ def generic_visit(self, node, *args, **kwargs):
+ node = super(Optimizer, self).generic_visit(node, *args, **kwargs)
+
+ # Do constant folding. Some other nodes besides Expr have
+ # as_const, but folding them causes errors later on.
+ if isinstance(node, nodes.Expr):
+ try:
+ return nodes.Const.from_untrusted(
+ node.as_const(args[0] if args else None),
+ lineno=node.lineno,
+ environment=self.environment,
+ )
+ except nodes.Impossible:
+ pass
+
+ return node
diff --git a/lib/spack/external/jinja2/parser.py b/lib/spack/external/jinja2/parser.py
index ed00d9708e..d5881066f7 100644
--- a/lib/spack/external/jinja2/parser.py
+++ b/lib/spack/external/jinja2/parser.py
@@ -1,41 +1,46 @@
# -*- coding: utf-8 -*-
-"""
- jinja2.parser
- ~~~~~~~~~~~~~
-
- Implements the template parser.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
-"""
-from jinja2 import nodes
-from jinja2.exceptions import TemplateSyntaxError, TemplateAssertionError
-from jinja2.lexer import describe_token, describe_token_expr
-from jinja2._compat import imap
-
-
-_statement_keywords = frozenset(['for', 'if', 'block', 'extends', 'print',
- 'macro', 'include', 'from', 'import',
- 'set', 'with', 'autoescape'])
-_compare_operators = frozenset(['eq', 'ne', 'lt', 'lteq', 'gt', 'gteq'])
+"""Parse tokens from the lexer into nodes for the compiler."""
+from . import nodes
+from ._compat import imap
+from .exceptions import TemplateAssertionError
+from .exceptions import TemplateSyntaxError
+from .lexer import describe_token
+from .lexer import describe_token_expr
+
+_statement_keywords = frozenset(
+ [
+ "for",
+ "if",
+ "block",
+ "extends",
+ "print",
+ "macro",
+ "include",
+ "from",
+ "import",
+ "set",
+ "with",
+ "autoescape",
+ ]
+)
+_compare_operators = frozenset(["eq", "ne", "lt", "lteq", "gt", "gteq"])
_math_nodes = {
- 'add': nodes.Add,
- 'sub': nodes.Sub,
- 'mul': nodes.Mul,
- 'div': nodes.Div,
- 'floordiv': nodes.FloorDiv,
- 'mod': nodes.Mod,
+ "add": nodes.Add,
+ "sub": nodes.Sub,
+ "mul": nodes.Mul,
+ "div": nodes.Div,
+ "floordiv": nodes.FloorDiv,
+ "mod": nodes.Mod,
}
class Parser(object):
- """This is the central parsing class Jinja2 uses. It's passed to
+ """This is the central parsing class Jinja uses. It's passed to
extensions and can be used to parse expressions or statements.
"""
- def __init__(self, environment, source, name=None, filename=None,
- state=None):
+ def __init__(self, environment, source, name=None, filename=None, state=None):
self.environment = environment
self.stream = environment._tokenize(source, name, filename, state)
self.name = name
@@ -63,31 +68,37 @@ class Parser(object):
for exprs in end_token_stack:
expected.extend(imap(describe_token_expr, exprs))
if end_token_stack:
- currently_looking = ' or '.join(
- "'%s'" % describe_token_expr(expr)
- for expr in end_token_stack[-1])
+ currently_looking = " or ".join(
+ "'%s'" % describe_token_expr(expr) for expr in end_token_stack[-1]
+ )
else:
currently_looking = None
if name is None:
- message = ['Unexpected end of template.']
+ message = ["Unexpected end of template."]
else:
- message = ['Encountered unknown tag \'%s\'.' % name]
+ message = ["Encountered unknown tag '%s'." % name]
if currently_looking:
if name is not None and name in expected:
- message.append('You probably made a nesting mistake. Jinja '
- 'is expecting this tag, but currently looking '
- 'for %s.' % currently_looking)
+ message.append(
+ "You probably made a nesting mistake. Jinja "
+ "is expecting this tag, but currently looking "
+ "for %s." % currently_looking
+ )
else:
- message.append('Jinja was looking for the following tags: '
- '%s.' % currently_looking)
+ message.append(
+ "Jinja was looking for the following tags: "
+ "%s." % currently_looking
+ )
if self._tag_stack:
- message.append('The innermost block that needs to be '
- 'closed is \'%s\'.' % self._tag_stack[-1])
+ message.append(
+ "The innermost block that needs to be "
+ "closed is '%s'." % self._tag_stack[-1]
+ )
- self.fail(' '.join(message), lineno)
+ self.fail(" ".join(message), lineno)
def fail_unknown_tag(self, name, lineno=None):
"""Called if the parser encounters an unknown tag. Tries to fail
@@ -105,7 +116,7 @@ class Parser(object):
def is_tuple_end(self, extra_end_rules=None):
"""Are we at the end of a tuple?"""
- if self.stream.current.type in ('variable_end', 'block_end', 'rparen'):
+ if self.stream.current.type in ("variable_end", "block_end", "rparen"):
return True
elif extra_end_rules is not None:
return self.stream.current.test_any(extra_end_rules)
@@ -115,22 +126,22 @@ class Parser(object):
"""Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
self._last_identifier += 1
rv = object.__new__(nodes.InternalName)
- nodes.Node.__init__(rv, 'fi%d' % self._last_identifier, lineno=lineno)
+ nodes.Node.__init__(rv, "fi%d" % self._last_identifier, lineno=lineno)
return rv
def parse_statement(self):
"""Parse a single statement."""
token = self.stream.current
- if token.type != 'name':
- self.fail('tag name expected', token.lineno)
+ if token.type != "name":
+ self.fail("tag name expected", token.lineno)
self._tag_stack.append(token.value)
pop_tag = True
try:
if token.value in _statement_keywords:
- return getattr(self, 'parse_' + self.stream.current.value)()
- if token.value == 'call':
+ return getattr(self, "parse_" + self.stream.current.value)()
+ if token.value == "call":
return self.parse_call_block()
- if token.value == 'filter':
+ if token.value == "filter":
return self.parse_filter_block()
ext = self.extensions.get(token.value)
if ext is not None:
@@ -157,16 +168,16 @@ class Parser(object):
can be set to `True` and the end token is removed.
"""
# the first token may be a colon for python compatibility
- self.stream.skip_if('colon')
+ self.stream.skip_if("colon")
# in the future it would be possible to add whole code sections
# by adding some sort of end of statement token and parsing those here.
- self.stream.expect('block_end')
+ self.stream.expect("block_end")
result = self.subparse(end_tokens)
# we reached the end of the template too early, the subparser
# does not check for this, so we do that now
- if self.stream.current.type == 'eof':
+ if self.stream.current.type == "eof":
self.fail_eof(end_tokens)
if drop_needle:
@@ -177,50 +188,47 @@ class Parser(object):
"""Parse an assign statement."""
lineno = next(self.stream).lineno
target = self.parse_assign_target(with_namespace=True)
- if self.stream.skip_if('assign'):
+ if self.stream.skip_if("assign"):
expr = self.parse_tuple()
return nodes.Assign(target, expr, lineno=lineno)
filter_node = self.parse_filter(None)
- body = self.parse_statements(('name:endset',),
- drop_needle=True)
+ body = self.parse_statements(("name:endset",), drop_needle=True)
return nodes.AssignBlock(target, filter_node, body, lineno=lineno)
def parse_for(self):
"""Parse a for loop."""
- lineno = self.stream.expect('name:for').lineno
- target = self.parse_assign_target(extra_end_rules=('name:in',))
- self.stream.expect('name:in')
- iter = self.parse_tuple(with_condexpr=False,
- extra_end_rules=('name:recursive',))
+ lineno = self.stream.expect("name:for").lineno
+ target = self.parse_assign_target(extra_end_rules=("name:in",))
+ self.stream.expect("name:in")
+ iter = self.parse_tuple(
+ with_condexpr=False, extra_end_rules=("name:recursive",)
+ )
test = None
- if self.stream.skip_if('name:if'):
+ if self.stream.skip_if("name:if"):
test = self.parse_expression()
- recursive = self.stream.skip_if('name:recursive')
- body = self.parse_statements(('name:endfor', 'name:else'))
- if next(self.stream).value == 'endfor':
+ recursive = self.stream.skip_if("name:recursive")
+ body = self.parse_statements(("name:endfor", "name:else"))
+ if next(self.stream).value == "endfor":
else_ = []
else:
- else_ = self.parse_statements(('name:endfor',), drop_needle=True)
- return nodes.For(target, iter, body, else_, test,
- recursive, lineno=lineno)
+ else_ = self.parse_statements(("name:endfor",), drop_needle=True)
+ return nodes.For(target, iter, body, else_, test, recursive, lineno=lineno)
def parse_if(self):
"""Parse an if construct."""
- node = result = nodes.If(lineno=self.stream.expect('name:if').lineno)
+ node = result = nodes.If(lineno=self.stream.expect("name:if").lineno)
while 1:
node.test = self.parse_tuple(with_condexpr=False)
- node.body = self.parse_statements(('name:elif', 'name:else',
- 'name:endif'))
+ node.body = self.parse_statements(("name:elif", "name:else", "name:endif"))
node.elif_ = []
node.else_ = []
token = next(self.stream)
- if token.test('name:elif'):
+ if token.test("name:elif"):
node = nodes.If(lineno=self.stream.current.lineno)
result.elif_.append(node)
continue
- elif token.test('name:else'):
- result.else_ = self.parse_statements(('name:endif',),
- drop_needle=True)
+ elif token.test("name:else"):
+ result.else_ = self.parse_statements(("name:endif",), drop_needle=True)
break
return result
@@ -228,45 +236,42 @@ class Parser(object):
node = nodes.With(lineno=next(self.stream).lineno)
targets = []
values = []
- while self.stream.current.type != 'block_end':
- lineno = self.stream.current.lineno
+ while self.stream.current.type != "block_end":
if targets:
- self.stream.expect('comma')
+ self.stream.expect("comma")
target = self.parse_assign_target()
- target.set_ctx('param')
+ target.set_ctx("param")
targets.append(target)
- self.stream.expect('assign')
+ self.stream.expect("assign")
values.append(self.parse_expression())
node.targets = targets
node.values = values
- node.body = self.parse_statements(('name:endwith',),
- drop_needle=True)
+ node.body = self.parse_statements(("name:endwith",), drop_needle=True)
return node
def parse_autoescape(self):
node = nodes.ScopedEvalContextModifier(lineno=next(self.stream).lineno)
- node.options = [
- nodes.Keyword('autoescape', self.parse_expression())
- ]
- node.body = self.parse_statements(('name:endautoescape',),
- drop_needle=True)
+ node.options = [nodes.Keyword("autoescape", self.parse_expression())]
+ node.body = self.parse_statements(("name:endautoescape",), drop_needle=True)
return nodes.Scope([node])
def parse_block(self):
node = nodes.Block(lineno=next(self.stream).lineno)
- node.name = self.stream.expect('name').value
- node.scoped = self.stream.skip_if('name:scoped')
+ node.name = self.stream.expect("name").value
+ node.scoped = self.stream.skip_if("name:scoped")
# common problem people encounter when switching from django
# to jinja. we do not support hyphens in block names, so let's
# raise a nicer error message in that case.
- if self.stream.current.type == 'sub':
- self.fail('Block names in Jinja have to be valid Python '
- 'identifiers and may not contain hyphens, use an '
- 'underscore instead.')
-
- node.body = self.parse_statements(('name:endblock',), drop_needle=True)
- self.stream.skip_if('name:' + node.name)
+ if self.stream.current.type == "sub":
+ self.fail(
+ "Block names in Jinja have to be valid Python "
+ "identifiers and may not contain hyphens, use an "
+ "underscore instead."
+ )
+
+ node.body = self.parse_statements(("name:endblock",), drop_needle=True)
+ self.stream.skip_if("name:" + node.name)
return node
def parse_extends(self):
@@ -275,9 +280,10 @@ class Parser(object):
return node
def parse_import_context(self, node, default):
- if self.stream.current.test_any('name:with', 'name:without') and \
- self.stream.look().test('name:context'):
- node.with_context = next(self.stream).value == 'with'
+ if self.stream.current.test_any(
+ "name:with", "name:without"
+ ) and self.stream.look().test("name:context"):
+ node.with_context = next(self.stream).value == "with"
self.stream.skip()
else:
node.with_context = default
@@ -286,8 +292,9 @@ class Parser(object):
def parse_include(self):
node = nodes.Include(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
- if self.stream.current.test('name:ignore') and \
- self.stream.look().test('name:missing'):
+ if self.stream.current.test("name:ignore") and self.stream.look().test(
+ "name:missing"
+ ):
node.ignore_missing = True
self.stream.skip(2)
else:
@@ -297,67 +304,71 @@ class Parser(object):
def parse_import(self):
node = nodes.Import(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
- self.stream.expect('name:as')
+ self.stream.expect("name:as")
node.target = self.parse_assign_target(name_only=True).name
return self.parse_import_context(node, False)
def parse_from(self):
node = nodes.FromImport(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
- self.stream.expect('name:import')
+ self.stream.expect("name:import")
node.names = []
def parse_context():
- if self.stream.current.value in ('with', 'without') and \
- self.stream.look().test('name:context'):
- node.with_context = next(self.stream).value == 'with'
+ if self.stream.current.value in (
+ "with",
+ "without",
+ ) and self.stream.look().test("name:context"):
+ node.with_context = next(self.stream).value == "with"
self.stream.skip()
return True
return False
while 1:
if node.names:
- self.stream.expect('comma')
- if self.stream.current.type == 'name':
+ self.stream.expect("comma")
+ if self.stream.current.type == "name":
if parse_context():
break
target = self.parse_assign_target(name_only=True)
- if target.name.startswith('_'):
- self.fail('names starting with an underline can not '
- 'be imported', target.lineno,
- exc=TemplateAssertionError)
- if self.stream.skip_if('name:as'):
+ if target.name.startswith("_"):
+ self.fail(
+ "names starting with an underline can not be imported",
+ target.lineno,
+ exc=TemplateAssertionError,
+ )
+ if self.stream.skip_if("name:as"):
alias = self.parse_assign_target(name_only=True)
node.names.append((target.name, alias.name))
else:
node.names.append(target.name)
- if parse_context() or self.stream.current.type != 'comma':
+ if parse_context() or self.stream.current.type != "comma":
break
else:
- self.stream.expect('name')
- if not hasattr(node, 'with_context'):
+ self.stream.expect("name")
+ if not hasattr(node, "with_context"):
node.with_context = False
return node
def parse_signature(self, node):
node.args = args = []
node.defaults = defaults = []
- self.stream.expect('lparen')
- while self.stream.current.type != 'rparen':
+ self.stream.expect("lparen")
+ while self.stream.current.type != "rparen":
if args:
- self.stream.expect('comma')
+ self.stream.expect("comma")
arg = self.parse_assign_target(name_only=True)
- arg.set_ctx('param')
- if self.stream.skip_if('assign'):
+ arg.set_ctx("param")
+ if self.stream.skip_if("assign"):
defaults.append(self.parse_expression())
elif defaults:
- self.fail('non-default argument follows default argument')
+ self.fail("non-default argument follows default argument")
args.append(arg)
- self.stream.expect('rparen')
+ self.stream.expect("rparen")
def parse_call_block(self):
node = nodes.CallBlock(lineno=next(self.stream).lineno)
- if self.stream.current.type == 'lparen':
+ if self.stream.current.type == "lparen":
self.parse_signature(node)
else:
node.args = []
@@ -365,37 +376,40 @@ class Parser(object):
node.call = self.parse_expression()
if not isinstance(node.call, nodes.Call):
- self.fail('expected call', node.lineno)
- node.body = self.parse_statements(('name:endcall',), drop_needle=True)
+ self.fail("expected call", node.lineno)
+ node.body = self.parse_statements(("name:endcall",), drop_needle=True)
return node
def parse_filter_block(self):
node = nodes.FilterBlock(lineno=next(self.stream).lineno)
node.filter = self.parse_filter(None, start_inline=True)
- node.body = self.parse_statements(('name:endfilter',),
- drop_needle=True)
+ node.body = self.parse_statements(("name:endfilter",), drop_needle=True)
return node
def parse_macro(self):
node = nodes.Macro(lineno=next(self.stream).lineno)
node.name = self.parse_assign_target(name_only=True).name
self.parse_signature(node)
- node.body = self.parse_statements(('name:endmacro',),
- drop_needle=True)
+ node.body = self.parse_statements(("name:endmacro",), drop_needle=True)
return node
def parse_print(self):
node = nodes.Output(lineno=next(self.stream).lineno)
node.nodes = []
- while self.stream.current.type != 'block_end':
+ while self.stream.current.type != "block_end":
if node.nodes:
- self.stream.expect('comma')
+ self.stream.expect("comma")
node.nodes.append(self.parse_expression())
return node
- def parse_assign_target(self, with_tuple=True, name_only=False,
- extra_end_rules=None, with_namespace=False):
- """Parse an assignment target. As Jinja2 allows assignments to
+ def parse_assign_target(
+ self,
+ with_tuple=True,
+ name_only=False,
+ extra_end_rules=None,
+ with_namespace=False,
+ ):
+ """Parse an assignment target. As Jinja allows assignments to
tuples, this function can parse all allowed assignment targets. Per
default assignments to tuples are parsed, that can be disable however
by setting `with_tuple` to `False`. If only assignments to names are
@@ -403,24 +417,26 @@ class Parser(object):
parameter is forwarded to the tuple parsing function. If
`with_namespace` is enabled, a namespace assignment may be parsed.
"""
- if with_namespace and self.stream.look().type == 'dot':
- token = self.stream.expect('name')
+ if with_namespace and self.stream.look().type == "dot":
+ token = self.stream.expect("name")
next(self.stream) # dot
- attr = self.stream.expect('name')
+ attr = self.stream.expect("name")
target = nodes.NSRef(token.value, attr.value, lineno=token.lineno)
elif name_only:
- token = self.stream.expect('name')
- target = nodes.Name(token.value, 'store', lineno=token.lineno)
+ token = self.stream.expect("name")
+ target = nodes.Name(token.value, "store", lineno=token.lineno)
else:
if with_tuple:
- target = self.parse_tuple(simplified=True,
- extra_end_rules=extra_end_rules)
+ target = self.parse_tuple(
+ simplified=True, extra_end_rules=extra_end_rules
+ )
else:
target = self.parse_primary()
- target.set_ctx('store')
+ target.set_ctx("store")
if not target.can_assign():
- self.fail('can\'t assign to %r' % target.__class__.
- __name__.lower(), target.lineno)
+ self.fail(
+ "can't assign to %r" % target.__class__.__name__.lower(), target.lineno
+ )
return target
def parse_expression(self, with_condexpr=True):
@@ -435,9 +451,9 @@ class Parser(object):
def parse_condexpr(self):
lineno = self.stream.current.lineno
expr1 = self.parse_or()
- while self.stream.skip_if('name:if'):
+ while self.stream.skip_if("name:if"):
expr2 = self.parse_or()
- if self.stream.skip_if('name:else'):
+ if self.stream.skip_if("name:else"):
expr3 = self.parse_condexpr()
else:
expr3 = None
@@ -448,7 +464,7 @@ class Parser(object):
def parse_or(self):
lineno = self.stream.current.lineno
left = self.parse_and()
- while self.stream.skip_if('name:or'):
+ while self.stream.skip_if("name:or"):
right = self.parse_and()
left = nodes.Or(left, right, lineno=lineno)
lineno = self.stream.current.lineno
@@ -457,14 +473,14 @@ class Parser(object):
def parse_and(self):
lineno = self.stream.current.lineno
left = self.parse_not()
- while self.stream.skip_if('name:and'):
+ while self.stream.skip_if("name:and"):
right = self.parse_not()
left = nodes.And(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_not(self):
- if self.stream.current.test('name:not'):
+ if self.stream.current.test("name:not"):
lineno = next(self.stream).lineno
return nodes.Not(self.parse_not(), lineno=lineno)
return self.parse_compare()
@@ -478,12 +494,13 @@ class Parser(object):
if token_type in _compare_operators:
next(self.stream)
ops.append(nodes.Operand(token_type, self.parse_math1()))
- elif self.stream.skip_if('name:in'):
- ops.append(nodes.Operand('in', self.parse_math1()))
- elif (self.stream.current.test('name:not') and
- self.stream.look().test('name:in')):
+ elif self.stream.skip_if("name:in"):
+ ops.append(nodes.Operand("in", self.parse_math1()))
+ elif self.stream.current.test("name:not") and self.stream.look().test(
+ "name:in"
+ ):
self.stream.skip(2)
- ops.append(nodes.Operand('notin', self.parse_math1()))
+ ops.append(nodes.Operand("notin", self.parse_math1()))
else:
break
lineno = self.stream.current.lineno
@@ -494,7 +511,7 @@ class Parser(object):
def parse_math1(self):
lineno = self.stream.current.lineno
left = self.parse_concat()
- while self.stream.current.type in ('add', 'sub'):
+ while self.stream.current.type in ("add", "sub"):
cls = _math_nodes[self.stream.current.type]
next(self.stream)
right = self.parse_concat()
@@ -505,7 +522,7 @@ class Parser(object):
def parse_concat(self):
lineno = self.stream.current.lineno
args = [self.parse_math2()]
- while self.stream.current.type == 'tilde':
+ while self.stream.current.type == "tilde":
next(self.stream)
args.append(self.parse_math2())
if len(args) == 1:
@@ -515,7 +532,7 @@ class Parser(object):
def parse_math2(self):
lineno = self.stream.current.lineno
left = self.parse_pow()
- while self.stream.current.type in ('mul', 'div', 'floordiv', 'mod'):
+ while self.stream.current.type in ("mul", "div", "floordiv", "mod"):
cls = _math_nodes[self.stream.current.type]
next(self.stream)
right = self.parse_pow()
@@ -526,7 +543,7 @@ class Parser(object):
def parse_pow(self):
lineno = self.stream.current.lineno
left = self.parse_unary()
- while self.stream.current.type == 'pow':
+ while self.stream.current.type == "pow":
next(self.stream)
right = self.parse_unary()
left = nodes.Pow(left, right, lineno=lineno)
@@ -536,10 +553,10 @@ class Parser(object):
def parse_unary(self, with_filter=True):
token_type = self.stream.current.type
lineno = self.stream.current.lineno
- if token_type == 'sub':
+ if token_type == "sub":
next(self.stream)
node = nodes.Neg(self.parse_unary(False), lineno=lineno)
- elif token_type == 'add':
+ elif token_type == "add":
next(self.stream)
node = nodes.Pos(self.parse_unary(False), lineno=lineno)
else:
@@ -551,40 +568,44 @@ class Parser(object):
def parse_primary(self):
token = self.stream.current
- if token.type == 'name':
- if token.value in ('true', 'false', 'True', 'False'):
- node = nodes.Const(token.value in ('true', 'True'),
- lineno=token.lineno)
- elif token.value in ('none', 'None'):
+ if token.type == "name":
+ if token.value in ("true", "false", "True", "False"):
+ node = nodes.Const(token.value in ("true", "True"), lineno=token.lineno)
+ elif token.value in ("none", "None"):
node = nodes.Const(None, lineno=token.lineno)
else:
- node = nodes.Name(token.value, 'load', lineno=token.lineno)
+ node = nodes.Name(token.value, "load", lineno=token.lineno)
next(self.stream)
- elif token.type == 'string':
+ elif token.type == "string":
next(self.stream)
buf = [token.value]
lineno = token.lineno
- while self.stream.current.type == 'string':
+ while self.stream.current.type == "string":
buf.append(self.stream.current.value)
next(self.stream)
- node = nodes.Const(''.join(buf), lineno=lineno)
- elif token.type in ('integer', 'float'):
+ node = nodes.Const("".join(buf), lineno=lineno)
+ elif token.type in ("integer", "float"):
next(self.stream)
node = nodes.Const(token.value, lineno=token.lineno)
- elif token.type == 'lparen':
+ elif token.type == "lparen":
next(self.stream)
node = self.parse_tuple(explicit_parentheses=True)
- self.stream.expect('rparen')
- elif token.type == 'lbracket':
+ self.stream.expect("rparen")
+ elif token.type == "lbracket":
node = self.parse_list()
- elif token.type == 'lbrace':
+ elif token.type == "lbrace":
node = self.parse_dict()
else:
self.fail("unexpected '%s'" % describe_token(token), token.lineno)
return node
- def parse_tuple(self, simplified=False, with_condexpr=True,
- extra_end_rules=None, explicit_parentheses=False):
+ def parse_tuple(
+ self,
+ simplified=False,
+ with_condexpr=True,
+ extra_end_rules=None,
+ explicit_parentheses=False,
+ ):
"""Works like `parse_expression` but if multiple expressions are
delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
This method could also return a regular expression instead of a tuple
@@ -609,16 +630,19 @@ class Parser(object):
elif with_condexpr:
parse = self.parse_expression
else:
- parse = lambda: self.parse_expression(with_condexpr=False)
+
+ def parse():
+ return self.parse_expression(with_condexpr=False)
+
args = []
is_tuple = False
while 1:
if args:
- self.stream.expect('comma')
+ self.stream.expect("comma")
if self.is_tuple_end(extra_end_rules):
break
args.append(parse())
- if self.stream.current.type == 'comma':
+ if self.stream.current.type == "comma":
is_tuple = True
else:
break
@@ -633,46 +657,48 @@ class Parser(object):
# nothing) in the spot of an expression would be an empty
# tuple.
if not explicit_parentheses:
- self.fail('Expected an expression, got \'%s\'' %
- describe_token(self.stream.current))
+ self.fail(
+ "Expected an expression, got '%s'"
+ % describe_token(self.stream.current)
+ )
- return nodes.Tuple(args, 'load', lineno=lineno)
+ return nodes.Tuple(args, "load", lineno=lineno)
def parse_list(self):
- token = self.stream.expect('lbracket')
+ token = self.stream.expect("lbracket")
items = []
- while self.stream.current.type != 'rbracket':
+ while self.stream.current.type != "rbracket":
if items:
- self.stream.expect('comma')
- if self.stream.current.type == 'rbracket':
+ self.stream.expect("comma")
+ if self.stream.current.type == "rbracket":
break
items.append(self.parse_expression())
- self.stream.expect('rbracket')
+ self.stream.expect("rbracket")
return nodes.List(items, lineno=token.lineno)
def parse_dict(self):
- token = self.stream.expect('lbrace')
+ token = self.stream.expect("lbrace")
items = []
- while self.stream.current.type != 'rbrace':
+ while self.stream.current.type != "rbrace":
if items:
- self.stream.expect('comma')
- if self.stream.current.type == 'rbrace':
+ self.stream.expect("comma")
+ if self.stream.current.type == "rbrace":
break
key = self.parse_expression()
- self.stream.expect('colon')
+ self.stream.expect("colon")
value = self.parse_expression()
items.append(nodes.Pair(key, value, lineno=key.lineno))
- self.stream.expect('rbrace')
+ self.stream.expect("rbrace")
return nodes.Dict(items, lineno=token.lineno)
def parse_postfix(self, node):
while 1:
token_type = self.stream.current.type
- if token_type == 'dot' or token_type == 'lbracket':
+ if token_type == "dot" or token_type == "lbracket":
node = self.parse_subscript(node)
# calls are valid both after postfix expressions (getattr
# and getitem) as well as filters and tests
- elif token_type == 'lparen':
+ elif token_type == "lparen":
node = self.parse_call(node)
else:
break
@@ -681,13 +707,13 @@ class Parser(object):
def parse_filter_expr(self, node):
while 1:
token_type = self.stream.current.type
- if token_type == 'pipe':
+ if token_type == "pipe":
node = self.parse_filter(node)
- elif token_type == 'name' and self.stream.current.value == 'is':
+ elif token_type == "name" and self.stream.current.value == "is":
node = self.parse_test(node)
# calls are valid both after postfix expressions (getattr
# and getitem) as well as filters and tests
- elif token_type == 'lparen':
+ elif token_type == "lparen":
node = self.parse_call(node)
else:
break
@@ -695,53 +721,54 @@ class Parser(object):
def parse_subscript(self, node):
token = next(self.stream)
- if token.type == 'dot':
+ if token.type == "dot":
attr_token = self.stream.current
next(self.stream)
- if attr_token.type == 'name':
- return nodes.Getattr(node, attr_token.value, 'load',
- lineno=token.lineno)
- elif attr_token.type != 'integer':
- self.fail('expected name or number', attr_token.lineno)
+ if attr_token.type == "name":
+ return nodes.Getattr(
+ node, attr_token.value, "load", lineno=token.lineno
+ )
+ elif attr_token.type != "integer":
+ self.fail("expected name or number", attr_token.lineno)
arg = nodes.Const(attr_token.value, lineno=attr_token.lineno)
- return nodes.Getitem(node, arg, 'load', lineno=token.lineno)
- if token.type == 'lbracket':
+ return nodes.Getitem(node, arg, "load", lineno=token.lineno)
+ if token.type == "lbracket":
args = []
- while self.stream.current.type != 'rbracket':
+ while self.stream.current.type != "rbracket":
if args:
- self.stream.expect('comma')
+ self.stream.expect("comma")
args.append(self.parse_subscribed())
- self.stream.expect('rbracket')
+ self.stream.expect("rbracket")
if len(args) == 1:
arg = args[0]
else:
- arg = nodes.Tuple(args, 'load', lineno=token.lineno)
- return nodes.Getitem(node, arg, 'load', lineno=token.lineno)
- self.fail('expected subscript expression', self.lineno)
+ arg = nodes.Tuple(args, "load", lineno=token.lineno)
+ return nodes.Getitem(node, arg, "load", lineno=token.lineno)
+ self.fail("expected subscript expression", token.lineno)
def parse_subscribed(self):
lineno = self.stream.current.lineno
- if self.stream.current.type == 'colon':
+ if self.stream.current.type == "colon":
next(self.stream)
args = [None]
else:
node = self.parse_expression()
- if self.stream.current.type != 'colon':
+ if self.stream.current.type != "colon":
return node
next(self.stream)
args = [node]
- if self.stream.current.type == 'colon':
+ if self.stream.current.type == "colon":
args.append(None)
- elif self.stream.current.type not in ('rbracket', 'comma'):
+ elif self.stream.current.type not in ("rbracket", "comma"):
args.append(self.parse_expression())
else:
args.append(None)
- if self.stream.current.type == 'colon':
+ if self.stream.current.type == "colon":
next(self.stream)
- if self.stream.current.type not in ('rbracket', 'comma'):
+ if self.stream.current.type not in ("rbracket", "comma"):
args.append(self.parse_expression())
else:
args.append(None)
@@ -751,7 +778,7 @@ class Parser(object):
return nodes.Slice(lineno=lineno, *args)
def parse_call(self, node):
- token = self.stream.expect('lparen')
+ token = self.stream.expect("lparen")
args = []
kwargs = []
dyn_args = dyn_kwargs = None
@@ -759,91 +786,100 @@ class Parser(object):
def ensure(expr):
if not expr:
- self.fail('invalid syntax for function call expression',
- token.lineno)
+ self.fail("invalid syntax for function call expression", token.lineno)
- while self.stream.current.type != 'rparen':
+ while self.stream.current.type != "rparen":
if require_comma:
- self.stream.expect('comma')
+ self.stream.expect("comma")
# support for trailing comma
- if self.stream.current.type == 'rparen':
+ if self.stream.current.type == "rparen":
break
- if self.stream.current.type == 'mul':
+ if self.stream.current.type == "mul":
ensure(dyn_args is None and dyn_kwargs is None)
next(self.stream)
dyn_args = self.parse_expression()
- elif self.stream.current.type == 'pow':
+ elif self.stream.current.type == "pow":
ensure(dyn_kwargs is None)
next(self.stream)
dyn_kwargs = self.parse_expression()
else:
- ensure(dyn_args is None and dyn_kwargs is None)
- if self.stream.current.type == 'name' and \
- self.stream.look().type == 'assign':
+ if (
+ self.stream.current.type == "name"
+ and self.stream.look().type == "assign"
+ ):
+ # Parsing a kwarg
+ ensure(dyn_kwargs is None)
key = self.stream.current.value
self.stream.skip(2)
value = self.parse_expression()
- kwargs.append(nodes.Keyword(key, value,
- lineno=value.lineno))
+ kwargs.append(nodes.Keyword(key, value, lineno=value.lineno))
else:
- ensure(not kwargs)
+ # Parsing an arg
+ ensure(dyn_args is None and dyn_kwargs is None and not kwargs)
args.append(self.parse_expression())
require_comma = True
- self.stream.expect('rparen')
+ self.stream.expect("rparen")
if node is None:
return args, kwargs, dyn_args, dyn_kwargs
- return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs,
- lineno=token.lineno)
+ return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno)
def parse_filter(self, node, start_inline=False):
- while self.stream.current.type == 'pipe' or start_inline:
+ while self.stream.current.type == "pipe" or start_inline:
if not start_inline:
next(self.stream)
- token = self.stream.expect('name')
+ token = self.stream.expect("name")
name = token.value
- while self.stream.current.type == 'dot':
+ while self.stream.current.type == "dot":
next(self.stream)
- name += '.' + self.stream.expect('name').value
- if self.stream.current.type == 'lparen':
+ name += "." + self.stream.expect("name").value
+ if self.stream.current.type == "lparen":
args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
else:
args = []
kwargs = []
dyn_args = dyn_kwargs = None
- node = nodes.Filter(node, name, args, kwargs, dyn_args,
- dyn_kwargs, lineno=token.lineno)
+ node = nodes.Filter(
+ node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
+ )
start_inline = False
return node
def parse_test(self, node):
token = next(self.stream)
- if self.stream.current.test('name:not'):
+ if self.stream.current.test("name:not"):
next(self.stream)
negated = True
else:
negated = False
- name = self.stream.expect('name').value
- while self.stream.current.type == 'dot':
+ name = self.stream.expect("name").value
+ while self.stream.current.type == "dot":
next(self.stream)
- name += '.' + self.stream.expect('name').value
+ name += "." + self.stream.expect("name").value
dyn_args = dyn_kwargs = None
kwargs = []
- if self.stream.current.type == 'lparen':
+ if self.stream.current.type == "lparen":
args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
- elif (self.stream.current.type in ('name', 'string', 'integer',
- 'float', 'lparen', 'lbracket',
- 'lbrace') and not
- self.stream.current.test_any('name:else', 'name:or',
- 'name:and')):
- if self.stream.current.test('name:is'):
- self.fail('You cannot chain multiple tests with is')
- args = [self.parse_primary()]
+ elif self.stream.current.type in (
+ "name",
+ "string",
+ "integer",
+ "float",
+ "lparen",
+ "lbracket",
+ "lbrace",
+ ) and not self.stream.current.test_any("name:else", "name:or", "name:and"):
+ if self.stream.current.test("name:is"):
+ self.fail("You cannot chain multiple tests with is")
+ arg_node = self.parse_primary()
+ arg_node = self.parse_postfix(arg_node)
+ args = [arg_node]
else:
args = []
- node = nodes.Test(node, name, args, kwargs, dyn_args,
- dyn_kwargs, lineno=token.lineno)
+ node = nodes.Test(
+ node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
+ )
if negated:
node = nodes.Not(node, lineno=token.lineno)
return node
@@ -865,29 +901,29 @@ class Parser(object):
try:
while self.stream:
token = self.stream.current
- if token.type == 'data':
+ if token.type == "data":
if token.value:
- add_data(nodes.TemplateData(token.value,
- lineno=token.lineno))
+ add_data(nodes.TemplateData(token.value, lineno=token.lineno))
next(self.stream)
- elif token.type == 'variable_begin':
+ elif token.type == "variable_begin":
next(self.stream)
add_data(self.parse_tuple(with_condexpr=True))
- self.stream.expect('variable_end')
- elif token.type == 'block_begin':
+ self.stream.expect("variable_end")
+ elif token.type == "block_begin":
flush_data()
next(self.stream)
- if end_tokens is not None and \
- self.stream.current.test_any(*end_tokens):
+ if end_tokens is not None and self.stream.current.test_any(
+ *end_tokens
+ ):
return body
rv = self.parse_statement()
if isinstance(rv, list):
body.extend(rv)
else:
body.append(rv)
- self.stream.expect('block_end')
+ self.stream.expect("block_end")
else:
- raise AssertionError('internal parsing error')
+ raise AssertionError("internal parsing error")
flush_data()
finally:
diff --git a/lib/spack/external/jinja2/runtime.py b/lib/spack/external/jinja2/runtime.py
index 52dfeaebd6..3ad7968624 100644
--- a/lib/spack/external/jinja2/runtime.py
+++ b/lib/spack/external/jinja2/runtime.py
@@ -1,43 +1,62 @@
# -*- coding: utf-8 -*-
-"""
- jinja2.runtime
- ~~~~~~~~~~~~~~
-
- Runtime helpers.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD.
-"""
+"""The runtime functions and state used by compiled templates."""
import sys
-
from itertools import chain
from types import MethodType
-from jinja2.nodes import EvalContext, _context_function_types
-from jinja2.utils import Markup, soft_unicode, escape, missing, concat, \
- internalcode, object_type_repr, evalcontextfunction, Namespace
-from jinja2.exceptions import UndefinedError, TemplateRuntimeError, \
- TemplateNotFound
-from jinja2._compat import imap, text_type, iteritems, \
- implements_iterator, implements_to_string, string_types, PY2, \
- with_metaclass
-
+from markupsafe import escape # noqa: F401
+from markupsafe import Markup
+from markupsafe import soft_unicode
+
+from ._compat import abc
+from ._compat import imap
+from ._compat import implements_iterator
+from ._compat import implements_to_string
+from ._compat import iteritems
+from ._compat import PY2
+from ._compat import string_types
+from ._compat import text_type
+from ._compat import with_metaclass
+from .exceptions import TemplateNotFound # noqa: F401
+from .exceptions import TemplateRuntimeError # noqa: F401
+from .exceptions import UndefinedError
+from .nodes import EvalContext
+from .utils import concat
+from .utils import evalcontextfunction
+from .utils import internalcode
+from .utils import missing
+from .utils import Namespace # noqa: F401
+from .utils import object_type_repr
# these variables are exported to the template runtime
-__all__ = ['LoopContext', 'TemplateReference', 'Macro', 'Markup',
- 'TemplateRuntimeError', 'missing', 'concat', 'escape',
- 'markup_join', 'unicode_join', 'to_string', 'identity',
- 'TemplateNotFound', 'Namespace']
+exported = [
+ "LoopContext",
+ "TemplateReference",
+ "Macro",
+ "Markup",
+ "TemplateRuntimeError",
+ "missing",
+ "concat",
+ "escape",
+ "markup_join",
+ "unicode_join",
+ "to_string",
+ "identity",
+ "TemplateNotFound",
+ "Namespace",
+ "Undefined",
+]
#: the name of the function that is used to convert something into
#: a string. We can just use the text type here.
to_string = text_type
-#: the identity function. Useful for certain things in the environment
-identity = lambda x: x
-_first_iteration = object()
-_last_iteration = object()
+def identity(x):
+ """Returns its argument. Useful for certain things in the
+ environment.
+ """
+ return x
def markup_join(seq):
@@ -46,8 +65,8 @@ def markup_join(seq):
iterator = imap(soft_unicode, seq)
for arg in iterator:
buf.append(arg)
- if hasattr(arg, '__html__'):
- return Markup(u'').join(chain(buf, iterator))
+ if hasattr(arg, "__html__"):
+ return Markup(u"").join(chain(buf, iterator))
return concat(buf)
@@ -56,9 +75,16 @@ def unicode_join(seq):
return concat(imap(text_type, seq))
-def new_context(environment, template_name, blocks, vars=None,
- shared=None, globals=None, locals=None):
- """Internal helper to for context creation."""
+def new_context(
+ environment,
+ template_name,
+ blocks,
+ vars=None,
+ shared=None,
+ globals=None,
+ locals=None,
+):
+ """Internal helper for context creation."""
if vars is None:
vars = {}
if shared:
@@ -73,8 +99,7 @@ def new_context(environment, template_name, blocks, vars=None,
for key, value in iteritems(locals):
if value is not missing:
parent[key] = value
- return environment.context_class(environment, parent, template_name,
- blocks)
+ return environment.context_class(environment, parent, template_name, blocks)
class TemplateReference(object):
@@ -88,20 +113,16 @@ class TemplateReference(object):
return BlockReference(name, self.__context, blocks, 0)
def __repr__(self):
- return '<%s %r>' % (
- self.__class__.__name__,
- self.__context.name
- )
+ return "<%s %r>" % (self.__class__.__name__, self.__context.name)
def _get_func(x):
- return getattr(x, '__func__', x)
+ return getattr(x, "__func__", x)
class ContextMeta(type):
-
- def __new__(cls, name, bases, d):
- rv = type.__new__(cls, name, bases, d)
+ def __new__(mcs, name, bases, d):
+ rv = type.__new__(mcs, name, bases, d)
if bases == ():
return rv
@@ -112,11 +133,15 @@ class ContextMeta(type):
# If we have a changed resolve but no changed default or missing
# resolve we invert the call logic.
- if resolve is not default_resolve and \
- resolve_or_missing is default_resolve_or_missing:
+ if (
+ resolve is not default_resolve
+ and resolve_or_missing is default_resolve_or_missing
+ ):
rv._legacy_resolve_mode = True
- elif resolve is default_resolve and \
- resolve_or_missing is default_resolve_or_missing:
+ elif (
+ resolve is default_resolve
+ and resolve_or_missing is default_resolve_or_missing
+ ):
rv._fast_resolve_mode = True
return rv
@@ -149,6 +174,7 @@ class Context(with_metaclass(ContextMeta)):
method that doesn't fail with a `KeyError` but returns an
:class:`Undefined` object for missing variables.
"""
+
# XXX: we want to eventually make this be a deprecation warning and
# remove it.
_legacy_resolve_mode = False
@@ -179,9 +205,9 @@ class Context(with_metaclass(ContextMeta)):
index = blocks.index(current) + 1
blocks[index]
except LookupError:
- return self.environment.undefined('there is no parent block '
- 'called %r.' % name,
- name='super')
+ return self.environment.undefined(
+ "there is no parent block called %r." % name, name="super"
+ )
return BlockReference(name, self, blocks, index)
def get(self, key, default=None):
@@ -232,7 +258,7 @@ class Context(with_metaclass(ContextMeta)):
return dict(self.parent, **self.vars)
@internalcode
- def call(__self, __obj, *args, **kwargs):
+ def call(__self, __obj, *args, **kwargs): # noqa: B902
"""Call the callable with the arguments and keyword arguments
provided but inject the active context or environment as first
argument if the callable is a :func:`contextfunction` or
@@ -242,55 +268,62 @@ class Context(with_metaclass(ContextMeta)):
__traceback_hide__ = True # noqa
# Allow callable classes to take a context
- if hasattr(__obj, '__call__'):
+ if hasattr(__obj, "__call__"): # noqa: B004
fn = __obj.__call__
- for fn_type in ('contextfunction',
- 'evalcontextfunction',
- 'environmentfunction'):
+ for fn_type in (
+ "contextfunction",
+ "evalcontextfunction",
+ "environmentfunction",
+ ):
if hasattr(fn, fn_type):
__obj = fn
break
- if isinstance(__obj, _context_function_types):
- if getattr(__obj, 'contextfunction', 0):
+ if callable(__obj):
+ if getattr(__obj, "contextfunction", False) is True:
args = (__self,) + args
- elif getattr(__obj, 'evalcontextfunction', 0):
+ elif getattr(__obj, "evalcontextfunction", False) is True:
args = (__self.eval_ctx,) + args
- elif getattr(__obj, 'environmentfunction', 0):
+ elif getattr(__obj, "environmentfunction", False) is True:
args = (__self.environment,) + args
try:
return __obj(*args, **kwargs)
except StopIteration:
- return __self.environment.undefined('value was undefined because '
- 'a callable raised a '
- 'StopIteration exception')
+ return __self.environment.undefined(
+ "value was undefined because "
+ "a callable raised a "
+ "StopIteration exception"
+ )
def derived(self, locals=None):
"""Internal helper function to create a derived context. This is
used in situations where the system needs a new context in the same
template that is independent.
"""
- context = new_context(self.environment, self.name, {},
- self.get_all(), True, None, locals)
+ context = new_context(
+ self.environment, self.name, {}, self.get_all(), True, None, locals
+ )
context.eval_ctx = self.eval_ctx
context.blocks.update((k, list(v)) for k, v in iteritems(self.blocks))
return context
- def _all(meth):
- proxy = lambda self: getattr(self.get_all(), meth)()
+ def _all(meth): # noqa: B902
+ def proxy(self):
+ return getattr(self.get_all(), meth)()
+
proxy.__doc__ = getattr(dict, meth).__doc__
proxy.__name__ = meth
return proxy
- keys = _all('keys')
- values = _all('values')
- items = _all('items')
+ keys = _all("keys")
+ values = _all("values")
+ items = _all("items")
# not available on python 3
if PY2:
- iterkeys = _all('iterkeys')
- itervalues = _all('itervalues')
- iteritems = _all('iteritems')
+ iterkeys = _all("iterkeys")
+ itervalues = _all("itervalues")
+ iteritems = _all("iteritems")
del _all
def __contains__(self, name):
@@ -306,23 +339,14 @@ class Context(with_metaclass(ContextMeta)):
return item
def __repr__(self):
- return '<%s %s of %r>' % (
+ return "<%s %s of %r>" % (
self.__class__.__name__,
repr(self.get_all()),
- self.name
+ self.name,
)
-# register the context as mapping if possible
-try:
- from collections.abc import Mapping
- Mapping.register(Context)
-except ImportError:
- try:
- from collections import Mapping
- Mapping.register(Context)
- except ImportError:
- pass
+abc.Mapping.register(Context)
class BlockReference(object):
@@ -338,11 +362,10 @@ class BlockReference(object):
def super(self):
"""Super the block."""
if self._depth + 1 >= len(self._stack):
- return self._context.environment. \
- undefined('there is no parent block called %r.' %
- self.name, name='super')
- return BlockReference(self.name, self._context, self._stack,
- self._depth + 1)
+ return self._context.environment.undefined(
+ "there is no parent block called %r." % self.name, name="super"
+ )
+ return BlockReference(self.name, self._context, self._stack, self._depth + 1)
@internalcode
def __call__(self):
@@ -352,143 +375,212 @@ class BlockReference(object):
return rv
-class LoopContextBase(object):
- """A loop context for dynamic iteration."""
+@implements_iterator
+class LoopContext:
+ """A wrapper iterable for dynamic ``for`` loops, with information
+ about the loop and iteration.
+ """
+
+ #: Current iteration of the loop, starting at 0.
+ index0 = -1
- _before = _first_iteration
- _current = _first_iteration
- _after = _last_iteration
_length = None
+ _after = missing
+ _current = missing
+ _before = missing
+ _last_changed_value = missing
- def __init__(self, undefined, recurse=None, depth0=0):
+ def __init__(self, iterable, undefined, recurse=None, depth0=0):
+ """
+ :param iterable: Iterable to wrap.
+ :param undefined: :class:`Undefined` class to use for next and
+ previous items.
+ :param recurse: The function to render the loop body when the
+ loop is marked recursive.
+ :param depth0: Incremented when looping recursively.
+ """
+ self._iterable = iterable
+ self._iterator = self._to_iterator(iterable)
self._undefined = undefined
self._recurse = recurse
- self.index0 = -1
+ #: How many levels deep a recursive loop currently is, starting at 0.
self.depth0 = depth0
- self._last_checked_value = missing
- def cycle(self, *args):
- """Cycles among the arguments with the current loop index."""
- if not args:
- raise TypeError('no items for cycling given')
- return args[self.index0 % len(args)]
+ @staticmethod
+ def _to_iterator(iterable):
+ return iter(iterable)
- def changed(self, *value):
- """Checks whether the value has changed since the last call."""
- if self._last_checked_value != value:
- self._last_checked_value = value
- return True
- return False
+ @property
+ def length(self):
+ """Length of the iterable.
- first = property(lambda x: x.index0 == 0)
- last = property(lambda x: x._after is _last_iteration)
- index = property(lambda x: x.index0 + 1)
- revindex = property(lambda x: x.length - x.index0)
- revindex0 = property(lambda x: x.length - x.index)
- depth = property(lambda x: x.depth0 + 1)
+ If the iterable is a generator or otherwise does not have a
+ size, it is eagerly evaluated to get a size.
+ """
+ if self._length is not None:
+ return self._length
- @property
- def previtem(self):
- if self._before is _first_iteration:
- return self._undefined('there is no previous item')
- return self._before
+ try:
+ self._length = len(self._iterable)
+ except TypeError:
+ iterable = list(self._iterator)
+ self._iterator = self._to_iterator(iterable)
+ self._length = len(iterable) + self.index + (self._after is not missing)
- @property
- def nextitem(self):
- if self._after is _last_iteration:
- return self._undefined('there is no next item')
- return self._after
+ return self._length
def __len__(self):
return self.length
- @internalcode
- def loop(self, iterable):
- if self._recurse is None:
- raise TypeError('Tried to call non recursive loop. Maybe you '
- "forgot the 'recursive' modifier.")
- return self._recurse(iterable, self._recurse, self.depth0 + 1)
+ @property
+ def depth(self):
+ """How many levels deep a recursive loop currently is, starting at 1."""
+ return self.depth0 + 1
- # a nifty trick to enhance the error message if someone tried to call
- # the the loop without or with too many arguments.
- __call__ = loop
- del loop
+ @property
+ def index(self):
+ """Current iteration of the loop, starting at 1."""
+ return self.index0 + 1
- def __repr__(self):
- return '<%s %r/%r>' % (
- self.__class__.__name__,
- self.index,
- self.length
- )
+ @property
+ def revindex0(self):
+ """Number of iterations from the end of the loop, ending at 0.
+ Requires calculating :attr:`length`.
+ """
+ return self.length - self.index
-class LoopContext(LoopContextBase):
+ @property
+ def revindex(self):
+ """Number of iterations from the end of the loop, ending at 1.
- def __init__(self, iterable, undefined, recurse=None, depth0=0):
- LoopContextBase.__init__(self, undefined, recurse, depth0)
- self._iterator = iter(iterable)
+ Requires calculating :attr:`length`.
+ """
+ return self.length - self.index0
- # try to get the length of the iterable early. This must be done
- # here because there are some broken iterators around where there
- # __len__ is the number of iterations left (i'm looking at your
- # listreverseiterator!).
- try:
- self._length = len(iterable)
- except (TypeError, AttributeError):
- self._length = None
- self._after = self._safe_next()
+ @property
+ def first(self):
+ """Whether this is the first iteration of the loop."""
+ return self.index0 == 0
+
+ def _peek_next(self):
+ """Return the next element in the iterable, or :data:`missing`
+ if the iterable is exhausted. Only peeks one item ahead, caching
+ the result in :attr:`_last` for use in subsequent checks. The
+ cache is reset when :meth:`__next__` is called.
+ """
+ if self._after is not missing:
+ return self._after
+
+ self._after = next(self._iterator, missing)
+ return self._after
@property
- def length(self):
- if self._length is None:
- # if was not possible to get the length of the iterator when
- # the loop context was created (ie: iterating over a generator)
- # we have to convert the iterable into a sequence and use the
- # length of that + the number of iterations so far.
- iterable = tuple(self._iterator)
- self._iterator = iter(iterable)
- iterations_done = self.index0 + 2
- self._length = len(iterable) + iterations_done
- return self._length
+ def last(self):
+ """Whether this is the last iteration of the loop.
- def __iter__(self):
- return LoopContextIterator(self)
+ Causes the iterable to advance early. See
+ :func:`itertools.groupby` for issues this can cause.
+ The :func:`groupby` filter avoids that issue.
+ """
+ return self._peek_next() is missing
- def _safe_next(self):
- try:
- return next(self._iterator)
- except StopIteration:
- return _last_iteration
+ @property
+ def previtem(self):
+ """The item in the previous iteration. Undefined during the
+ first iteration.
+ """
+ if self.first:
+ return self._undefined("there is no previous item")
+ return self._before
-@implements_iterator
-class LoopContextIterator(object):
- """The iterator for a loop context."""
- __slots__ = ('context',)
+ @property
+ def nextitem(self):
+ """The item in the next iteration. Undefined during the last
+ iteration.
- def __init__(self, context):
- self.context = context
+ Causes the iterable to advance early. See
+ :func:`itertools.groupby` for issues this can cause.
+ The :func:`groupby` filter avoids that issue.
+ """
+ rv = self._peek_next()
+
+ if rv is missing:
+ return self._undefined("there is no next item")
+
+ return rv
+
+ def cycle(self, *args):
+ """Return a value from the given args, cycling through based on
+ the current :attr:`index0`.
+
+ :param args: One or more values to cycle through.
+ """
+ if not args:
+ raise TypeError("no items for cycling given")
+
+ return args[self.index0 % len(args)]
+
+ def changed(self, *value):
+ """Return ``True`` if previously called with a different value
+ (including when called for the first time).
+
+ :param value: One or more values to compare to the last call.
+ """
+ if self._last_changed_value != value:
+ self._last_changed_value = value
+ return True
+
+ return False
def __iter__(self):
return self
def __next__(self):
- ctx = self.context
- ctx.index0 += 1
- if ctx._after is _last_iteration:
- raise StopIteration()
- ctx._before = ctx._current
- ctx._current = ctx._after
- ctx._after = ctx._safe_next()
- return ctx._current, ctx
+ if self._after is not missing:
+ rv = self._after
+ self._after = missing
+ else:
+ rv = next(self._iterator)
+
+ self.index0 += 1
+ self._before = self._current
+ self._current = rv
+ return rv, self
+
+ @internalcode
+ def __call__(self, iterable):
+ """When iterating over nested data, render the body of the loop
+ recursively with the given inner iterable data.
+
+ The loop must have the ``recursive`` marker for this to work.
+ """
+ if self._recurse is None:
+ raise TypeError(
+ "The loop must have the 'recursive' marker to be called recursively."
+ )
+
+ return self._recurse(iterable, self._recurse, depth=self.depth)
+
+ def __repr__(self):
+ return "<%s %d/%d>" % (self.__class__.__name__, self.index, self.length)
class Macro(object):
"""Wraps a macro function."""
- def __init__(self, environment, func, name, arguments,
- catch_kwargs, catch_varargs, caller,
- default_autoescape=None):
+ def __init__(
+ self,
+ environment,
+ func,
+ name,
+ arguments,
+ catch_kwargs,
+ catch_varargs,
+ caller,
+ default_autoescape=None,
+ ):
self._environment = environment
self._func = func
self._argument_count = len(arguments)
@@ -497,7 +589,7 @@ class Macro(object):
self.catch_kwargs = catch_kwargs
self.catch_varargs = catch_varargs
self.caller = caller
- self.explicit_caller = 'caller' in arguments
+ self.explicit_caller = "caller" in arguments
if default_autoescape is None:
default_autoescape = environment.autoescape
self._default_autoescape = default_autoescape
@@ -509,9 +601,8 @@ class Macro(object):
# decide largely based on compile-time information if a macro is
# safe or unsafe. While there was a volatile mode it was largely
# unused for deciding on escaping. This turns out to be
- # problemtic for macros because if a macro is safe or not not so
- # much depends on the escape mode when it was defined but when it
- # was used.
+ # problematic for macros because whether a macro is safe depends not
+ # on the escape mode when it was defined, but rather when it was used.
#
# Because however we export macros from the module system and
# there are historic callers that do not pass an eval context (and
@@ -519,7 +610,7 @@ class Macro(object):
# check here.
#
# This is considered safe because an eval context is not a valid
- # argument to callables otherwise anwyays. Worst case here is
+ # argument to callables otherwise anyway. Worst case here is
# that if no eval context is passed we fall back to the compile
# time autoescape flag.
if args and isinstance(args[0], EvalContext):
@@ -529,7 +620,7 @@ class Macro(object):
autoescape = self._default_autoescape
# try to consume the positional arguments
- arguments = list(args[:self._argument_count])
+ arguments = list(args[: self._argument_count])
off = len(arguments)
# For information why this is necessary refer to the handling
@@ -540,12 +631,12 @@ class Macro(object):
# arguments expected we start filling in keyword arguments
# and defaults.
if off != self._argument_count:
- for idx, name in enumerate(self.arguments[len(arguments):]):
+ for name in self.arguments[len(arguments) :]:
try:
value = kwargs.pop(name)
except KeyError:
value = missing
- if name == 'caller':
+ if name == "caller":
found_caller = True
arguments.append(value)
else:
@@ -555,26 +646,31 @@ class Macro(object):
# if not also changed in the compiler's `function_scoping` method.
# the order is caller, keyword arguments, positional arguments!
if self.caller and not found_caller:
- caller = kwargs.pop('caller', None)
+ caller = kwargs.pop("caller", None)
if caller is None:
- caller = self._environment.undefined('No caller defined',
- name='caller')
+ caller = self._environment.undefined("No caller defined", name="caller")
arguments.append(caller)
if self.catch_kwargs:
arguments.append(kwargs)
elif kwargs:
- if 'caller' in kwargs:
- raise TypeError('macro %r was invoked with two values for '
- 'the special caller argument. This is '
- 'most likely a bug.' % self.name)
- raise TypeError('macro %r takes no keyword argument %r' %
- (self.name, next(iter(kwargs))))
+ if "caller" in kwargs:
+ raise TypeError(
+ "macro %r was invoked with two values for "
+ "the special caller argument. This is "
+ "most likely a bug." % self.name
+ )
+ raise TypeError(
+ "macro %r takes no keyword argument %r"
+ % (self.name, next(iter(kwargs)))
+ )
if self.catch_varargs:
- arguments.append(args[self._argument_count:])
+ arguments.append(args[self._argument_count :])
elif len(args) > self._argument_count:
- raise TypeError('macro %r takes not more than %d argument(s)' %
- (self.name, len(self.arguments)))
+ raise TypeError(
+ "macro %r takes not more than %d argument(s)"
+ % (self.name, len(self.arguments))
+ )
return self._invoke(arguments, autoescape)
@@ -586,16 +682,16 @@ class Macro(object):
return rv
def __repr__(self):
- return '<%s %s>' % (
+ return "<%s %s>" % (
self.__class__.__name__,
- self.name is None and 'anonymous' or repr(self.name)
+ self.name is None and "anonymous" or repr(self.name),
)
@implements_to_string
class Undefined(object):
"""The default undefined type. This undefined type can be printed and
- iterated over, but every other access will raise an :exc:`jinja2.exceptions.UndefinedError`:
+ iterated over, but every other access will raise an :exc:`UndefinedError`:
>>> foo = Undefined(name='foo')
>>> str(foo)
@@ -607,8 +703,13 @@ class Undefined(object):
...
jinja2.exceptions.UndefinedError: 'foo' is undefined
"""
- __slots__ = ('_undefined_hint', '_undefined_obj', '_undefined_name',
- '_undefined_exception')
+
+ __slots__ = (
+ "_undefined_hint",
+ "_undefined_obj",
+ "_undefined_name",
+ "_undefined_exception",
+ )
def __init__(self, hint=None, obj=missing, name=None, exc=UndefinedError):
self._undefined_hint = hint
@@ -616,40 +717,86 @@ class Undefined(object):
self._undefined_name = name
self._undefined_exception = exc
+ @property
+ def _undefined_message(self):
+ """Build a message about the undefined value based on how it was
+ accessed.
+ """
+ if self._undefined_hint:
+ return self._undefined_hint
+
+ if self._undefined_obj is missing:
+ return "%r is undefined" % self._undefined_name
+
+ if not isinstance(self._undefined_name, string_types):
+ return "%s has no element %r" % (
+ object_type_repr(self._undefined_obj),
+ self._undefined_name,
+ )
+
+ return "%r has no attribute %r" % (
+ object_type_repr(self._undefined_obj),
+ self._undefined_name,
+ )
+
@internalcode
def _fail_with_undefined_error(self, *args, **kwargs):
- """Regular callback function for undefined objects that raises an
- `jinja2.exceptions.UndefinedError` on call.
+ """Raise an :exc:`UndefinedError` when operations are performed
+ on the undefined value.
"""
- if self._undefined_hint is None:
- if self._undefined_obj is missing:
- hint = '%r is undefined' % self._undefined_name
- elif not isinstance(self._undefined_name, string_types):
- hint = '%s has no element %r' % (
- object_type_repr(self._undefined_obj),
- self._undefined_name
- )
- else:
- hint = '%r has no attribute %r' % (
- object_type_repr(self._undefined_obj),
- self._undefined_name
- )
- else:
- hint = self._undefined_hint
- raise self._undefined_exception(hint)
+ raise self._undefined_exception(self._undefined_message)
@internalcode
def __getattr__(self, name):
- if name[:2] == '__':
+ if name[:2] == "__":
raise AttributeError(name)
return self._fail_with_undefined_error()
- __add__ = __radd__ = __mul__ = __rmul__ = __div__ = __rdiv__ = \
- __truediv__ = __rtruediv__ = __floordiv__ = __rfloordiv__ = \
- __mod__ = __rmod__ = __pos__ = __neg__ = __call__ = \
- __getitem__ = __lt__ = __le__ = __gt__ = __ge__ = __int__ = \
- __float__ = __complex__ = __pow__ = __rpow__ = __sub__ = \
- __rsub__ = _fail_with_undefined_error
+ __add__ = (
+ __radd__
+ ) = (
+ __mul__
+ ) = (
+ __rmul__
+ ) = (
+ __div__
+ ) = (
+ __rdiv__
+ ) = (
+ __truediv__
+ ) = (
+ __rtruediv__
+ ) = (
+ __floordiv__
+ ) = (
+ __rfloordiv__
+ ) = (
+ __mod__
+ ) = (
+ __rmod__
+ ) = (
+ __pos__
+ ) = (
+ __neg__
+ ) = (
+ __call__
+ ) = (
+ __getitem__
+ ) = (
+ __lt__
+ ) = (
+ __le__
+ ) = (
+ __gt__
+ ) = (
+ __ge__
+ ) = (
+ __int__
+ ) = (
+ __float__
+ ) = (
+ __complex__
+ ) = __pow__ = __rpow__ = __sub__ = __rsub__ = _fail_with_undefined_error
def __eq__(self, other):
return type(self) is type(other)
@@ -661,7 +808,7 @@ class Undefined(object):
return id(type(self))
def __str__(self):
- return u''
+ return u""
def __len__(self):
return 0
@@ -672,10 +819,11 @@ class Undefined(object):
def __nonzero__(self):
return False
+
__bool__ = __nonzero__
def __repr__(self):
- return 'Undefined'
+ return "Undefined"
def make_logging_undefined(logger=None, base=None):
@@ -700,6 +848,7 @@ def make_logging_undefined(logger=None, base=None):
"""
if logger is None:
import logging
+
logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler(sys.stderr))
if base is None:
@@ -708,26 +857,27 @@ def make_logging_undefined(logger=None, base=None):
def _log_message(undef):
if undef._undefined_hint is None:
if undef._undefined_obj is missing:
- hint = '%s is undefined' % undef._undefined_name
+ hint = "%s is undefined" % undef._undefined_name
elif not isinstance(undef._undefined_name, string_types):
- hint = '%s has no element %s' % (
+ hint = "%s has no element %s" % (
object_type_repr(undef._undefined_obj),
- undef._undefined_name)
+ undef._undefined_name,
+ )
else:
- hint = '%s has no attribute %s' % (
+ hint = "%s has no attribute %s" % (
object_type_repr(undef._undefined_obj),
- undef._undefined_name)
+ undef._undefined_name,
+ )
else:
hint = undef._undefined_hint
- logger.warning('Template variable warning: %s', hint)
+ logger.warning("Template variable warning: %s", hint)
class LoggingUndefined(base):
-
def _fail_with_undefined_error(self, *args, **kwargs):
try:
return base._fail_with_undefined_error(self, *args, **kwargs)
except self._undefined_exception as e:
- logger.error('Template variable error: %s', str(e))
+ logger.error("Template variable error: %s", str(e))
raise e
def __str__(self):
@@ -741,6 +891,7 @@ def make_logging_undefined(logger=None, base=None):
return rv
if PY2:
+
def __nonzero__(self):
rv = base.__nonzero__(self)
_log_message(self)
@@ -750,7 +901,9 @@ def make_logging_undefined(logger=None, base=None):
rv = base.__unicode__(self)
_log_message(self)
return rv
+
else:
+
def __bool__(self):
rv = base.__bool__(self)
_log_message(self)
@@ -759,6 +912,36 @@ def make_logging_undefined(logger=None, base=None):
return LoggingUndefined
+# No @implements_to_string decorator here because __str__
+# is not overwritten from Undefined in this class.
+# This would cause a recursion error in Python 2.
+class ChainableUndefined(Undefined):
+ """An undefined that is chainable, where both ``__getattr__`` and
+ ``__getitem__`` return itself rather than raising an
+ :exc:`UndefinedError`.
+
+ >>> foo = ChainableUndefined(name='foo')
+ >>> str(foo.bar['baz'])
+ ''
+ >>> foo.bar['baz'] + 42
+ Traceback (most recent call last):
+ ...
+ jinja2.exceptions.UndefinedError: 'foo' is undefined
+
+ .. versionadded:: 2.11.0
+ """
+
+ __slots__ = ()
+
+ def __html__(self):
+ return self.__str__()
+
+ def __getattr__(self, _):
+ return self
+
+ __getitem__ = __getattr__
+
+
@implements_to_string
class DebugUndefined(Undefined):
"""An undefined that returns the debug info when printed.
@@ -773,17 +956,18 @@ class DebugUndefined(Undefined):
...
jinja2.exceptions.UndefinedError: 'foo' is undefined
"""
+
__slots__ = ()
def __str__(self):
if self._undefined_hint is None:
if self._undefined_obj is missing:
- return u'{{ %s }}' % self._undefined_name
- return '{{ no such element: %s[%r] }}' % (
+ return u"{{ %s }}" % self._undefined_name
+ return "{{ no such element: %s[%r] }}" % (
object_type_repr(self._undefined_obj),
- self._undefined_name
+ self._undefined_name,
)
- return u'{{ undefined value printed: %s }}' % self._undefined_hint
+ return u"{{ undefined value printed: %s }}" % self._undefined_hint
@implements_to_string
@@ -806,12 +990,22 @@ class StrictUndefined(Undefined):
...
jinja2.exceptions.UndefinedError: 'foo' is undefined
"""
+
__slots__ = ()
- __iter__ = __str__ = __len__ = __nonzero__ = __eq__ = \
- __ne__ = __bool__ = __hash__ = \
- Undefined._fail_with_undefined_error
+ __iter__ = (
+ __str__
+ ) = (
+ __len__
+ ) = (
+ __nonzero__
+ ) = __eq__ = __ne__ = __bool__ = __hash__ = Undefined._fail_with_undefined_error
# remove remaining slots attributes, after the metaclass did the magic they
# are unneeded and irritating as they contain wrong data for the subclasses.
-del Undefined.__slots__, DebugUndefined.__slots__, StrictUndefined.__slots__
+del (
+ Undefined.__slots__,
+ ChainableUndefined.__slots__,
+ DebugUndefined.__slots__,
+ StrictUndefined.__slots__,
+)
diff --git a/lib/spack/external/jinja2/sandbox.py b/lib/spack/external/jinja2/sandbox.py
index b9e5ec495a..cfd7993aee 100644
--- a/lib/spack/external/jinja2/sandbox.py
+++ b/lib/spack/external/jinja2/sandbox.py
@@ -1,76 +1,66 @@
# -*- coding: utf-8 -*-
+"""A sandbox layer that ensures unsafe operations cannot be performed.
+Useful when the template itself comes from an untrusted source.
"""
- jinja2.sandbox
- ~~~~~~~~~~~~~~
-
- Adds a sandbox layer to Jinja as it was the default behavior in the old
- Jinja 1 releases. This sandbox is slightly different from Jinja 1 as the
- default behavior is easier to use.
-
- The behavior can be changed by subclassing the environment.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD.
-"""
-import types
import operator
-import sys
-from jinja2.environment import Environment
-from jinja2.exceptions import SecurityError
-from jinja2._compat import string_types, PY2
-from jinja2.utils import Markup
-
-from markupsafe import EscapeFormatter
+import types
+import warnings
+from collections import deque
from string import Formatter
-if sys.version_info >= (3, 3):
- from collections.abc import Mapping
-else:
- from collections import Mapping
+from markupsafe import EscapeFormatter
+from markupsafe import Markup
+from ._compat import abc
+from ._compat import PY2
+from ._compat import range_type
+from ._compat import string_types
+from .environment import Environment
+from .exceptions import SecurityError
#: maximum number of items a range may produce
MAX_RANGE = 100000
#: attributes of function objects that are considered unsafe.
if PY2:
- UNSAFE_FUNCTION_ATTRIBUTES = set(['func_closure', 'func_code', 'func_dict',
- 'func_defaults', 'func_globals'])
+ UNSAFE_FUNCTION_ATTRIBUTES = {
+ "func_closure",
+ "func_code",
+ "func_dict",
+ "func_defaults",
+ "func_globals",
+ }
else:
# On versions > python 2 the special attributes on functions are gone,
# but they remain on methods and generators for whatever reason.
UNSAFE_FUNCTION_ATTRIBUTES = set()
-
#: unsafe method attributes. function attributes are unsafe for methods too
-UNSAFE_METHOD_ATTRIBUTES = set(['im_class', 'im_func', 'im_self'])
+UNSAFE_METHOD_ATTRIBUTES = {"im_class", "im_func", "im_self"}
-#: unsafe generator attirbutes.
-UNSAFE_GENERATOR_ATTRIBUTES = set(['gi_frame', 'gi_code'])
+#: unsafe generator attributes.
+UNSAFE_GENERATOR_ATTRIBUTES = {"gi_frame", "gi_code"}
#: unsafe attributes on coroutines
-UNSAFE_COROUTINE_ATTRIBUTES = set(['cr_frame', 'cr_code'])
+UNSAFE_COROUTINE_ATTRIBUTES = {"cr_frame", "cr_code"}
#: unsafe attributes on async generators
-UNSAFE_ASYNC_GENERATOR_ATTRIBUTES = set(['ag_code', 'ag_frame'])
-
-import warnings
+UNSAFE_ASYNC_GENERATOR_ATTRIBUTES = {"ag_code", "ag_frame"}
# make sure we don't warn in python 2.6 about stuff we don't care about
-warnings.filterwarnings('ignore', 'the sets module', DeprecationWarning,
- module='jinja2.sandbox')
-
-from collections import deque
+warnings.filterwarnings(
+ "ignore", "the sets module", DeprecationWarning, module=__name__
+)
_mutable_set_types = (set,)
_mutable_mapping_types = (dict,)
_mutable_sequence_types = (list,)
-
# on python 2.x we can register the user collection types
try:
from UserDict import UserDict, DictMixin
from UserList import UserList
+
_mutable_mapping_types += (UserDict, DictMixin)
_mutable_set_types += (UserList,)
except ImportError:
@@ -79,39 +69,60 @@ except ImportError:
# if sets is still available, register the mutable set from there as well
try:
from sets import Set
+
_mutable_set_types += (Set,)
except ImportError:
pass
#: register Python 2.6 abstract base classes
-if sys.version_info >= (3, 3):
- from collections.abc import MutableSet, MutableMapping, MutableSequence
-else:
- from collections import MutableSet, MutableMapping, MutableSequence
-_mutable_set_types += (MutableSet,)
-_mutable_mapping_types += (MutableMapping,)
-_mutable_sequence_types += (MutableSequence,)
-
+_mutable_set_types += (abc.MutableSet,)
+_mutable_mapping_types += (abc.MutableMapping,)
+_mutable_sequence_types += (abc.MutableSequence,)
_mutable_spec = (
- (_mutable_set_types, frozenset([
- 'add', 'clear', 'difference_update', 'discard', 'pop', 'remove',
- 'symmetric_difference_update', 'update'
- ])),
- (_mutable_mapping_types, frozenset([
- 'clear', 'pop', 'popitem', 'setdefault', 'update'
- ])),
- (_mutable_sequence_types, frozenset([
- 'append', 'reverse', 'insert', 'sort', 'extend', 'remove'
- ])),
- (deque, frozenset([
- 'append', 'appendleft', 'clear', 'extend', 'extendleft', 'pop',
- 'popleft', 'remove', 'rotate'
- ]))
+ (
+ _mutable_set_types,
+ frozenset(
+ [
+ "add",
+ "clear",
+ "difference_update",
+ "discard",
+ "pop",
+ "remove",
+ "symmetric_difference_update",
+ "update",
+ ]
+ ),
+ ),
+ (
+ _mutable_mapping_types,
+ frozenset(["clear", "pop", "popitem", "setdefault", "update"]),
+ ),
+ (
+ _mutable_sequence_types,
+ frozenset(["append", "reverse", "insert", "sort", "extend", "remove"]),
+ ),
+ (
+ deque,
+ frozenset(
+ [
+ "append",
+ "appendleft",
+ "clear",
+ "extend",
+ "extendleft",
+ "pop",
+ "popleft",
+ "remove",
+ "rotate",
+ ]
+ ),
+ ),
)
-class _MagicFormatMapping(Mapping):
+class _MagicFormatMapping(abc.Mapping):
"""This class implements a dummy wrapper to fix a bug in the Python
standard library for string formatting.
@@ -125,7 +136,7 @@ class _MagicFormatMapping(Mapping):
self._last_index = 0
def __getitem__(self, key):
- if key == '':
+ if key == "":
idx = self._last_index
self._last_index += 1
try:
@@ -143,9 +154,9 @@ class _MagicFormatMapping(Mapping):
def inspect_format_method(callable):
- if not isinstance(callable, (types.MethodType,
- types.BuiltinMethodType)) or \
- callable.__name__ != 'format':
+ if not isinstance(
+ callable, (types.MethodType, types.BuiltinMethodType)
+ ) or callable.__name__ not in ("format", "format_map"):
return None
obj = callable.__self__
if isinstance(obj, string_types):
@@ -156,10 +167,14 @@ def safe_range(*args):
"""A range that can't generate ranges with a length of more than
MAX_RANGE items.
"""
- rng = range(*args)
+ rng = range_type(*args)
+
if len(rng) > MAX_RANGE:
- raise OverflowError('range too big, maximum size for range is %d' %
- MAX_RANGE)
+ raise OverflowError(
+ "Range too big. The sandbox blocks ranges larger than"
+ " MAX_RANGE (%d)." % MAX_RANGE
+ )
+
return rng
@@ -192,24 +207,25 @@ def is_internal_attribute(obj, attr):
if attr in UNSAFE_FUNCTION_ATTRIBUTES:
return True
elif isinstance(obj, types.MethodType):
- if attr in UNSAFE_FUNCTION_ATTRIBUTES or \
- attr in UNSAFE_METHOD_ATTRIBUTES:
+ if attr in UNSAFE_FUNCTION_ATTRIBUTES or attr in UNSAFE_METHOD_ATTRIBUTES:
return True
elif isinstance(obj, type):
- if attr == 'mro':
+ if attr == "mro":
return True
elif isinstance(obj, (types.CodeType, types.TracebackType, types.FrameType)):
return True
elif isinstance(obj, types.GeneratorType):
if attr in UNSAFE_GENERATOR_ATTRIBUTES:
return True
- elif hasattr(types, 'CoroutineType') and isinstance(obj, types.CoroutineType):
+ elif hasattr(types, "CoroutineType") and isinstance(obj, types.CoroutineType):
if attr in UNSAFE_COROUTINE_ATTRIBUTES:
return True
- elif hasattr(types, 'AsyncGeneratorType') and isinstance(obj, types.AsyncGeneratorType):
+ elif hasattr(types, "AsyncGeneratorType") and isinstance(
+ obj, types.AsyncGeneratorType
+ ):
if attr in UNSAFE_ASYNC_GENERATOR_ATTRIBUTES:
return True
- return attr.startswith('__')
+ return attr.startswith("__")
def modifies_known_mutable(obj, attr):
@@ -250,28 +266,26 @@ class SandboxedEnvironment(Environment):
raised. However also other exceptions may occur during the rendering so
the caller has to ensure that all exceptions are caught.
"""
+
sandboxed = True
#: default callback table for the binary operators. A copy of this is
#: available on each instance of a sandboxed environment as
#: :attr:`binop_table`
default_binop_table = {
- '+': operator.add,
- '-': operator.sub,
- '*': operator.mul,
- '/': operator.truediv,
- '//': operator.floordiv,
- '**': operator.pow,
- '%': operator.mod
+ "+": operator.add,
+ "-": operator.sub,
+ "*": operator.mul,
+ "/": operator.truediv,
+ "//": operator.floordiv,
+ "**": operator.pow,
+ "%": operator.mod,
}
#: default callback table for the unary operators. A copy of this is
#: available on each instance of a sandboxed environment as
#: :attr:`unop_table`
- default_unop_table = {
- '+': operator.pos,
- '-': operator.neg
- }
+ default_unop_table = {"+": operator.pos, "-": operator.neg}
#: a set of binary operators that should be intercepted. Each operator
#: that is added to this set (empty by default) is delegated to the
@@ -307,7 +321,7 @@ class SandboxedEnvironment(Environment):
def intercept_unop(self, operator):
"""Called during template compilation with the name of a unary
operator to check if it should be intercepted at runtime. If this
- method returns `True`, :meth:`call_unop` is excuted for this unary
+ method returns `True`, :meth:`call_unop` is executed for this unary
operator. The default implementation of :meth:`call_unop` will use
the :attr:`unop_table` dictionary to perform the operator with the
same logic as the builtin one.
@@ -321,10 +335,9 @@ class SandboxedEnvironment(Environment):
"""
return False
-
def __init__(self, *args, **kwargs):
Environment.__init__(self, *args, **kwargs)
- self.globals['range'] = safe_range
+ self.globals["range"] = safe_range
self.binop_table = self.default_binop_table.copy()
self.unop_table = self.default_unop_table.copy()
@@ -335,7 +348,7 @@ class SandboxedEnvironment(Environment):
special attributes of internal python objects as returned by the
:func:`is_internal_attribute` function.
"""
- return not (attr.startswith('_') or is_internal_attribute(obj, attr))
+ return not (attr.startswith("_") or is_internal_attribute(obj, attr))
def is_safe_callable(self, obj):
"""Check if an object is safely callable. Per default a function is
@@ -343,8 +356,9 @@ class SandboxedEnvironment(Environment):
True. Override this method to alter the behavior, but this won't
affect the `unsafe` decorator from this module.
"""
- return not (getattr(obj, 'unsafe_callable', False) or
- getattr(obj, 'alters_data', False))
+ return not (
+ getattr(obj, "unsafe_callable", False) or getattr(obj, "alters_data", False)
+ )
def call_binop(self, context, operator, left, right):
"""For intercepted binary operator calls (:meth:`intercepted_binops`)
@@ -404,13 +418,15 @@ class SandboxedEnvironment(Environment):
def unsafe_undefined(self, obj, attribute):
"""Return an undefined object for unsafe attributes."""
- return self.undefined('access to attribute %r of %r '
- 'object is unsafe.' % (
- attribute,
- obj.__class__.__name__
- ), name=attribute, obj=obj, exc=SecurityError)
-
- def format_string(self, s, args, kwargs):
+ return self.undefined(
+ "access to attribute %r of %r "
+ "object is unsafe." % (attribute, obj.__class__.__name__),
+ name=attribute,
+ obj=obj,
+ exc=SecurityError,
+ )
+
+ def format_string(self, s, args, kwargs, format_func=None):
"""If a format call is detected, then this is routed through this
method so that our safety sandbox can be used for it.
"""
@@ -418,20 +434,31 @@ class SandboxedEnvironment(Environment):
formatter = SandboxedEscapeFormatter(self, s.escape)
else:
formatter = SandboxedFormatter(self)
+
+ if format_func is not None and format_func.__name__ == "format_map":
+ if len(args) != 1 or kwargs:
+ raise TypeError(
+ "format_map() takes exactly one argument %d given"
+ % (len(args) + (kwargs is not None))
+ )
+
+ kwargs = args[0]
+ args = None
+
kwargs = _MagicFormatMapping(args, kwargs)
rv = formatter.vformat(s, args, kwargs)
return type(s)(rv)
- def call(__self, __context, __obj, *args, **kwargs):
+ def call(__self, __context, __obj, *args, **kwargs): # noqa: B902
"""Call an object from sandboxed code."""
fmt = inspect_format_method(__obj)
if fmt is not None:
- return __self.format_string(fmt, args, kwargs)
+ return __self.format_string(fmt, args, kwargs, __obj)
# the double prefixes are to avoid double keyword argument
# errors when proxying the call.
if not __self.is_safe_callable(__obj):
- raise SecurityError('%r is not safely callable' % (__obj,))
+ raise SecurityError("%r is not safely callable" % (__obj,))
return __context.call(__obj, *args, **kwargs)
@@ -447,16 +474,16 @@ class ImmutableSandboxedEnvironment(SandboxedEnvironment):
return not modifies_known_mutable(obj, attr)
-# This really is not a public API apparenlty.
+# This really is not a public API apparently.
try:
from _string import formatter_field_name_split
except ImportError:
+
def formatter_field_name_split(field_name):
return field_name._formatter_field_name_split()
class SandboxedFormatterMixin(object):
-
def __init__(self, env):
self._env = env
@@ -470,14 +497,14 @@ class SandboxedFormatterMixin(object):
obj = self._env.getitem(obj, i)
return obj, first
-class SandboxedFormatter(SandboxedFormatterMixin, Formatter):
+class SandboxedFormatter(SandboxedFormatterMixin, Formatter):
def __init__(self, env):
SandboxedFormatterMixin.__init__(self, env)
Formatter.__init__(self)
-class SandboxedEscapeFormatter(SandboxedFormatterMixin, EscapeFormatter):
+class SandboxedEscapeFormatter(SandboxedFormatterMixin, EscapeFormatter):
def __init__(self, env, escape):
SandboxedFormatterMixin.__init__(self, env)
EscapeFormatter.__init__(self, escape)
diff --git a/lib/spack/external/jinja2/tests.py b/lib/spack/external/jinja2/tests.py
index d5d6b5b33f..fabd4ce51b 100644
--- a/lib/spack/external/jinja2/tests.py
+++ b/lib/spack/external/jinja2/tests.py
@@ -1,29 +1,17 @@
# -*- coding: utf-8 -*-
-"""
- jinja2.tests
- ~~~~~~~~~~~~
-
- Jinja test functions. Used with the "is" operator.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
-"""
+"""Built-in template tests used with the ``is`` operator."""
+import decimal
import operator
import re
-import sys
-from jinja2.runtime import Undefined
-from jinja2._compat import text_type, string_types, integer_types
-import decimal
-if sys.version_info >= (3, 3):
- from collections.abc import Mapping
-else:
- from collections import Mapping
+from ._compat import abc
+from ._compat import integer_types
+from ._compat import string_types
+from ._compat import text_type
+from .runtime import Undefined
-number_re = re.compile(r'^-?\d+(\.\d+)?$')
+number_re = re.compile(r"^-?\d+(\.\d+)?$")
regex_type = type(number_re)
-
-
test_callable = callable
@@ -69,6 +57,48 @@ def test_none(value):
return value is None
+def test_boolean(value):
+ """Return true if the object is a boolean value.
+
+ .. versionadded:: 2.11
+ """
+ return value is True or value is False
+
+
+def test_false(value):
+ """Return true if the object is False.
+
+ .. versionadded:: 2.11
+ """
+ return value is False
+
+
+def test_true(value):
+ """Return true if the object is True.
+
+ .. versionadded:: 2.11
+ """
+ return value is True
+
+
+# NOTE: The existing 'number' test matches booleans and floats
+def test_integer(value):
+ """Return true if the object is an integer.
+
+ .. versionadded:: 2.11
+ """
+ return isinstance(value, integer_types) and value is not True and value is not False
+
+
+# NOTE: The existing 'number' test matches booleans and integers
+def test_float(value):
+ """Return true if the object is a float.
+
+ .. versionadded:: 2.11
+ """
+ return isinstance(value, float)
+
+
def test_lower(value):
"""Return true if the variable is lowercased."""
return text_type(value).islower()
@@ -89,7 +119,7 @@ def test_mapping(value):
.. versionadded:: 2.6
"""
- return isinstance(value, Mapping)
+ return isinstance(value, abc.Mapping)
def test_number(value):
@@ -104,7 +134,7 @@ def test_sequence(value):
try:
len(value)
value.__getitem__
- except:
+ except Exception:
return False
return True
@@ -133,7 +163,7 @@ def test_iterable(value):
def test_escaped(value):
"""Check if the value is escaped."""
- return hasattr(value, '__html__')
+ return hasattr(value, "__html__")
def test_in(value, seq):
@@ -145,36 +175,41 @@ def test_in(value, seq):
TESTS = {
- 'odd': test_odd,
- 'even': test_even,
- 'divisibleby': test_divisibleby,
- 'defined': test_defined,
- 'undefined': test_undefined,
- 'none': test_none,
- 'lower': test_lower,
- 'upper': test_upper,
- 'string': test_string,
- 'mapping': test_mapping,
- 'number': test_number,
- 'sequence': test_sequence,
- 'iterable': test_iterable,
- 'callable': test_callable,
- 'sameas': test_sameas,
- 'escaped': test_escaped,
- 'in': test_in,
- '==': operator.eq,
- 'eq': operator.eq,
- 'equalto': operator.eq,
- '!=': operator.ne,
- 'ne': operator.ne,
- '>': operator.gt,
- 'gt': operator.gt,
- 'greaterthan': operator.gt,
- 'ge': operator.ge,
- '>=': operator.ge,
- '<': operator.lt,
- 'lt': operator.lt,
- 'lessthan': operator.lt,
- '<=': operator.le,
- 'le': operator.le,
+ "odd": test_odd,
+ "even": test_even,
+ "divisibleby": test_divisibleby,
+ "defined": test_defined,
+ "undefined": test_undefined,
+ "none": test_none,
+ "boolean": test_boolean,
+ "false": test_false,
+ "true": test_true,
+ "integer": test_integer,
+ "float": test_float,
+ "lower": test_lower,
+ "upper": test_upper,
+ "string": test_string,
+ "mapping": test_mapping,
+ "number": test_number,
+ "sequence": test_sequence,
+ "iterable": test_iterable,
+ "callable": test_callable,
+ "sameas": test_sameas,
+ "escaped": test_escaped,
+ "in": test_in,
+ "==": operator.eq,
+ "eq": operator.eq,
+ "equalto": operator.eq,
+ "!=": operator.ne,
+ "ne": operator.ne,
+ ">": operator.gt,
+ "gt": operator.gt,
+ "greaterthan": operator.gt,
+ "ge": operator.ge,
+ ">=": operator.ge,
+ "<": operator.lt,
+ "lt": operator.lt,
+ "lessthan": operator.lt,
+ "<=": operator.le,
+ "le": operator.le,
}
diff --git a/lib/spack/external/jinja2/utils.py b/lib/spack/external/jinja2/utils.py
index cff4e783a8..6afca81055 100644
--- a/lib/spack/external/jinja2/utils.py
+++ b/lib/spack/external/jinja2/utils.py
@@ -1,44 +1,32 @@
# -*- coding: utf-8 -*-
-"""
- jinja2.utils
- ~~~~~~~~~~~~
-
- Utility functions.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
-"""
-import re
import json
-import errno
+import os
+import re
+import warnings
from collections import deque
+from random import choice
+from random import randrange
+from string import ascii_letters as _letters
+from string import digits as _digits
from threading import Lock
-from jinja2._compat import text_type, string_types, implements_iterator, \
- url_quote
+from markupsafe import escape
+from markupsafe import Markup
-_word_split_re = re.compile(r'(\s+)')
-_punctuation_re = re.compile(
- '^(?P<lead>(?:%s)*)(?P<middle>.*?)(?P<trail>(?:%s)*)$' % (
- '|'.join(map(re.escape, ('(', '<', '&lt;'))),
- '|'.join(map(re.escape, ('.', ',', ')', '>', '\n', '&gt;')))
- )
-)
-_simple_email_re = re.compile(r'^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$')
-_striptags_re = re.compile(r'(<!--.*?-->|<[^>]*>)')
-_entity_re = re.compile(r'&([^;]+);')
-_letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
-_digits = '0123456789'
+from ._compat import abc
+from ._compat import string_types
+from ._compat import text_type
+from ._compat import url_quote
# special singleton representing missing values for the runtime
-missing = type('MissingType', (), {'__repr__': lambda x: 'missing'})()
+missing = type("MissingType", (), {"__repr__": lambda x: "missing"})()
# internal code
internal_code = set()
-concat = u''.join
+concat = u"".join
-_slash_escape = '\\/' not in json.dumps('/')
+_slash_escape = "\\/" not in json.dumps("/")
def contextfunction(f):
@@ -98,24 +86,26 @@ def is_undefined(obj):
return default
return var
"""
- from jinja2.runtime import Undefined
+ from .runtime import Undefined
+
return isinstance(obj, Undefined)
def consume(iterable):
"""Consumes an iterable without doing anything with it."""
- for event in iterable:
+ for _ in iterable:
pass
def clear_caches():
- """Jinja2 keeps internal caches for environments and lexers. These are
- used so that Jinja2 doesn't have to recreate environments and lexers all
+ """Jinja keeps internal caches for environments and lexers. These are
+ used so that Jinja doesn't have to recreate environments and lexers all
the time. Normally you don't have to care about that but if you are
measuring memory consumption you may want to clean the caches.
"""
- from jinja2.environment import _spontaneous_environments
- from jinja2.lexer import _lexer_cache
+ from .environment import _spontaneous_environments
+ from .lexer import _lexer_cache
+
_spontaneous_environments.clear()
_lexer_cache.clear()
@@ -132,12 +122,10 @@ def import_string(import_name, silent=False):
:return: imported object
"""
try:
- if ':' in import_name:
- module, obj = import_name.split(':', 1)
- elif '.' in import_name:
- items = import_name.split('.')
- module = '.'.join(items[:-1])
- obj = items[-1]
+ if ":" in import_name:
+ module, obj = import_name.split(":", 1)
+ elif "." in import_name:
+ module, _, obj = import_name.rpartition(".")
else:
return __import__(import_name)
return getattr(__import__(module, None, None, [obj]), obj)
@@ -146,15 +134,14 @@ def import_string(import_name, silent=False):
raise
-def open_if_exists(filename, mode='rb'):
+def open_if_exists(filename, mode="rb"):
"""Returns a file descriptor for the filename if that file exists,
- otherwise `None`.
+ otherwise ``None``.
"""
- try:
- return open(filename, mode)
- except IOError as e:
- if e.errno not in (errno.ENOENT, errno.EISDIR, errno.EINVAL):
- raise
+ if not os.path.isfile(filename):
+ return None
+
+ return open(filename, mode)
def object_type_repr(obj):
@@ -163,15 +150,19 @@ def object_type_repr(obj):
example for `None` and `Ellipsis`).
"""
if obj is None:
- return 'None'
+ return "None"
elif obj is Ellipsis:
- return 'Ellipsis'
+ return "Ellipsis"
+
+ cls = type(obj)
+
# __builtin__ in 2.x, builtins in 3.x
- if obj.__class__.__module__ in ('__builtin__', 'builtins'):
- name = obj.__class__.__name__
+ if cls.__module__ in ("__builtin__", "builtins"):
+ name = cls.__name__
else:
- name = obj.__class__.__module__ + '.' + obj.__class__.__name__
- return '%s object' % name
+ name = cls.__module__ + "." + cls.__name__
+
+ return "%s object" % name
def pformat(obj, verbose=False):
@@ -180,9 +171,11 @@ def pformat(obj, verbose=False):
"""
try:
from pretty import pretty
+
return pretty(obj, verbose=verbose)
except ImportError:
from pprint import pformat
+
return pformat(obj)
@@ -200,45 +193,77 @@ def urlize(text, trim_url_limit=None, rel=None, target=None):
If target is not None, a target attribute will be added to the link.
"""
- trim_url = lambda x, limit=trim_url_limit: limit is not None \
- and (x[:limit] + (len(x) >=limit and '...'
- or '')) or x
- words = _word_split_re.split(text_type(escape(text)))
- rel_attr = rel and ' rel="%s"' % text_type(escape(rel)) or ''
- target_attr = target and ' target="%s"' % escape(target) or ''
+ trim_url = (
+ lambda x, limit=trim_url_limit: limit is not None
+ and (x[:limit] + (len(x) >= limit and "..." or ""))
+ or x
+ )
+ words = re.split(r"(\s+)", text_type(escape(text)))
+ rel_attr = rel and ' rel="%s"' % text_type(escape(rel)) or ""
+ target_attr = target and ' target="%s"' % escape(target) or ""
for i, word in enumerate(words):
- match = _punctuation_re.match(word)
+ head, middle, tail = "", word, ""
+ match = re.match(r"^([(<]|&lt;)+", middle)
+
if match:
- lead, middle, trail = match.groups()
- if middle.startswith('www.') or (
- '@' not in middle and
- not middle.startswith('http://') and
- not middle.startswith('https://') and
- len(middle) > 0 and
- middle[0] in _letters + _digits and (
- middle.endswith('.org') or
- middle.endswith('.net') or
- middle.endswith('.com')
- )):
- middle = '<a href="http://%s"%s%s>%s</a>' % (middle,
- rel_attr, target_attr, trim_url(middle))
- if middle.startswith('http://') or \
- middle.startswith('https://'):
- middle = '<a href="%s"%s%s>%s</a>' % (middle,
- rel_attr, target_attr, trim_url(middle))
- if '@' in middle and not middle.startswith('www.') and \
- not ':' in middle and _simple_email_re.match(middle):
- middle = '<a href="mailto:%s">%s</a>' % (middle, middle)
- if lead + middle + trail != word:
- words[i] = lead + middle + trail
- return u''.join(words)
+ head = match.group()
+ middle = middle[match.end() :]
+
+ # Unlike lead, which is anchored to the start of the string,
+ # need to check that the string ends with any of the characters
+ # before trying to match all of them, to avoid backtracking.
+ if middle.endswith((")", ">", ".", ",", "\n", "&gt;")):
+ match = re.search(r"([)>.,\n]|&gt;)+$", middle)
+
+ if match:
+ tail = match.group()
+ middle = middle[: match.start()]
+
+ if middle.startswith("www.") or (
+ "@" not in middle
+ and not middle.startswith("http://")
+ and not middle.startswith("https://")
+ and len(middle) > 0
+ and middle[0] in _letters + _digits
+ and (
+ middle.endswith(".org")
+ or middle.endswith(".net")
+ or middle.endswith(".com")
+ )
+ ):
+ middle = '<a href="http://%s"%s%s>%s</a>' % (
+ middle,
+ rel_attr,
+ target_attr,
+ trim_url(middle),
+ )
+
+ if middle.startswith("http://") or middle.startswith("https://"):
+ middle = '<a href="%s"%s%s>%s</a>' % (
+ middle,
+ rel_attr,
+ target_attr,
+ trim_url(middle),
+ )
+
+ if (
+ "@" in middle
+ and not middle.startswith("www.")
+ and ":" not in middle
+ and re.match(r"^\S+@\w[\w.-]*\.\w+$", middle)
+ ):
+ middle = '<a href="mailto:%s">%s</a>' % (middle, middle)
+
+ words[i] = head + middle + tail
+
+ return u"".join(words)
def generate_lorem_ipsum(n=5, html=True, min=20, max=100):
"""Generate some lorem ipsum for the template."""
- from jinja2.constants import LOREM_IPSUM_WORDS
- from random import choice, randrange
+ from .constants import LOREM_IPSUM_WORDS
+
words = LOREM_IPSUM_WORDS.split()
result = []
@@ -263,43 +288,53 @@ def generate_lorem_ipsum(n=5, html=True, min=20, max=100):
if idx - randrange(3, 8) > last_comma:
last_comma = idx
last_fullstop += 2
- word += ','
+ word += ","
# add end of sentences
if idx - randrange(10, 20) > last_fullstop:
last_comma = last_fullstop = idx
- word += '.'
+ word += "."
next_capitalized = True
p.append(word)
# ensure that the paragraph ends with a dot.
- p = u' '.join(p)
- if p.endswith(','):
- p = p[:-1] + '.'
- elif not p.endswith('.'):
- p += '.'
+ p = u" ".join(p)
+ if p.endswith(","):
+ p = p[:-1] + "."
+ elif not p.endswith("."):
+ p += "."
result.append(p)
if not html:
- return u'\n\n'.join(result)
- return Markup(u'\n'.join(u'<p>%s</p>' % escape(x) for x in result))
+ return u"\n\n".join(result)
+ return Markup(u"\n".join(u"<p>%s</p>" % escape(x) for x in result))
+
+def unicode_urlencode(obj, charset="utf-8", for_qs=False):
+ """Quote a string for use in a URL using the given charset.
-def unicode_urlencode(obj, charset='utf-8', for_qs=False):
- """URL escapes a single bytestring or unicode string with the
- given charset if applicable to URL safe quoting under all rules
- that need to be considered under all supported Python versions.
+ This function is misnamed, it is a wrapper around
+ :func:`urllib.parse.quote`.
- If non strings are provided they are converted to their unicode
- representation first.
+ :param obj: String or bytes to quote. Other types are converted to
+ string then encoded to bytes using the given charset.
+ :param charset: Encode text to bytes using this charset.
+ :param for_qs: Quote "/" and use "+" for spaces.
"""
if not isinstance(obj, string_types):
obj = text_type(obj)
+
if isinstance(obj, text_type):
obj = obj.encode(charset)
- safe = not for_qs and b'/' or b''
- rv = text_type(url_quote(obj, safe))
+
+ safe = b"" if for_qs else b"/"
+ rv = url_quote(obj, safe)
+
+ if not isinstance(rv, text_type):
+ rv = rv.decode("utf-8")
+
if for_qs:
- rv = rv.replace('%20', '+')
+ rv = rv.replace("%20", "+")
+
return rv
@@ -326,9 +361,9 @@ class LRUCache(object):
def __getstate__(self):
return {
- 'capacity': self.capacity,
- '_mapping': self._mapping,
- '_queue': self._queue
+ "capacity": self.capacity,
+ "_mapping": self._mapping,
+ "_queue": self._queue,
}
def __setstate__(self, d):
@@ -342,7 +377,7 @@ class LRUCache(object):
"""Return a shallow copy of the instance."""
rv = self.__class__(self.capacity)
rv._mapping.update(self._mapping)
- rv._queue = deque(self._queue)
+ rv._queue.extend(self._queue)
return rv
def get(self, key, default=None):
@@ -356,15 +391,11 @@ class LRUCache(object):
"""Set `default` if the key is not in the cache otherwise
leave unchanged. Return the value of this key.
"""
- self._wlock.acquire()
try:
- try:
- return self[key]
- except KeyError:
- self[key] = default
- return default
- finally:
- self._wlock.release()
+ return self[key]
+ except KeyError:
+ self[key] = default
+ return default
def clear(self):
"""Clear the cache."""
@@ -384,10 +415,7 @@ class LRUCache(object):
return len(self._mapping)
def __repr__(self):
- return '<%s %r>' % (
- self.__class__.__name__,
- self._mapping
- )
+ return "<%s %r>" % (self.__class__.__name__, self._mapping)
def __getitem__(self, key):
"""Get an item from the cache. Moves the item up so that it has the
@@ -436,7 +464,6 @@ class LRUCache(object):
try:
self._remove(key)
except ValueError:
- # __getitem__ is not locked, it might happen
pass
finally:
self._wlock.release()
@@ -449,6 +476,12 @@ class LRUCache(object):
def iteritems(self):
"""Iterate over all items."""
+ warnings.warn(
+ "'iteritems()' will be removed in version 3.0. Use"
+ " 'iter(cache.items())' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
return iter(self.items())
def values(self):
@@ -457,6 +490,22 @@ class LRUCache(object):
def itervalue(self):
"""Iterate over all values."""
+ warnings.warn(
+ "'itervalue()' will be removed in version 3.0. Use"
+ " 'iter(cache.values())' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return iter(self.values())
+
+ def itervalues(self):
+ """Iterate over all values."""
+ warnings.warn(
+ "'itervalues()' will be removed in version 3.0. Use"
+ " 'iter(cache.values())' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
return iter(self.values())
def keys(self):
@@ -467,12 +516,19 @@ class LRUCache(object):
"""Iterate over all keys in the cache dict, ordered by
the most recent usage.
"""
- return reversed(tuple(self._queue))
+ warnings.warn(
+ "'iterkeys()' will be removed in version 3.0. Use"
+ " 'iter(cache.keys())' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return iter(self)
- __iter__ = iterkeys
+ def __iter__(self):
+ return reversed(tuple(self._queue))
def __reversed__(self):
- """Iterate over the values in the cache dict, oldest items
+ """Iterate over the keys in the cache dict, oldest items
coming first.
"""
return iter(tuple(self._queue))
@@ -480,22 +536,15 @@ class LRUCache(object):
__copy__ = copy
-# register the LRU cache as mutable mapping if possible
-try:
- from collections.abc import MutableMapping
- MutableMapping.register(LRUCache)
-except ImportError:
- try:
- from collections import MutableMapping
- MutableMapping.register(LRUCache)
- except ImportError:
- pass
+abc.MutableMapping.register(LRUCache)
-def select_autoescape(enabled_extensions=('html', 'htm', 'xml'),
- disabled_extensions=(),
- default_for_string=True,
- default=False):
+def select_autoescape(
+ enabled_extensions=("html", "htm", "xml"),
+ disabled_extensions=(),
+ default_for_string=True,
+ default=False,
+):
"""Intelligently sets the initial value of autoescaping based on the
filename of the template. This is the recommended way to configure
autoescaping if you do not want to write a custom function yourself.
@@ -530,10 +579,9 @@ def select_autoescape(enabled_extensions=('html', 'htm', 'xml'),
.. versionadded:: 2.9
"""
- enabled_patterns = tuple('.' + x.lstrip('.').lower()
- for x in enabled_extensions)
- disabled_patterns = tuple('.' + x.lstrip('.').lower()
- for x in disabled_extensions)
+ enabled_patterns = tuple("." + x.lstrip(".").lower() for x in enabled_extensions)
+ disabled_patterns = tuple("." + x.lstrip(".").lower() for x in disabled_extensions)
+
def autoescape(template_name):
if template_name is None:
return default_for_string
@@ -543,6 +591,7 @@ def select_autoescape(enabled_extensions=('html', 'htm', 'xml'),
if template_name.endswith(disabled_patterns):
return False
return default
+
return autoescape
@@ -566,35 +615,63 @@ def htmlsafe_json_dumps(obj, dumper=None, **kwargs):
"""
if dumper is None:
dumper = json.dumps
- rv = dumper(obj, **kwargs) \
- .replace(u'<', u'\\u003c') \
- .replace(u'>', u'\\u003e') \
- .replace(u'&', u'\\u0026') \
- .replace(u"'", u'\\u0027')
+ rv = (
+ dumper(obj, **kwargs)
+ .replace(u"<", u"\\u003c")
+ .replace(u">", u"\\u003e")
+ .replace(u"&", u"\\u0026")
+ .replace(u"'", u"\\u0027")
+ )
return Markup(rv)
-@implements_iterator
class Cycler(object):
- """A cycle helper for templates."""
+ """Cycle through values by yield them one at a time, then restarting
+ once the end is reached. Available as ``cycler`` in templates.
+
+ Similar to ``loop.cycle``, but can be used outside loops or across
+ multiple loops. For example, render a list of folders and files in a
+ list, alternating giving them "odd" and "even" classes.
+
+ .. code-block:: html+jinja
+
+ {% set row_class = cycler("odd", "even") %}
+ <ul class="browser">
+ {% for folder in folders %}
+ <li class="folder {{ row_class.next() }}">{{ folder }}
+ {% endfor %}
+ {% for file in files %}
+ <li class="file {{ row_class.next() }}">{{ file }}
+ {% endfor %}
+ </ul>
+
+ :param items: Each positional argument will be yielded in the order
+ given for each cycle.
+
+ .. versionadded:: 2.1
+ """
def __init__(self, *items):
if not items:
- raise RuntimeError('at least one item has to be provided')
+ raise RuntimeError("at least one item has to be provided")
self.items = items
- self.reset()
+ self.pos = 0
def reset(self):
- """Resets the cycle."""
+ """Resets the current item to the first item."""
self.pos = 0
@property
def current(self):
- """Returns the current item."""
+ """Return the current item. Equivalent to the item that will be
+ returned next time :meth:`next` is called.
+ """
return self.items[self.pos]
def next(self):
- """Goes one item ahead and returns it."""
+ """Return the current item, then advance :attr:`current` to the
+ next item.
+ """
rv = self.current
self.pos = (self.pos + 1) % len(self.items)
return rv
@@ -605,27 +682,28 @@ class Cycler(object):
class Joiner(object):
"""A joining helper for templates."""
- def __init__(self, sep=u', '):
+ def __init__(self, sep=u", "):
self.sep = sep
self.used = False
def __call__(self):
if not self.used:
self.used = True
- return u''
+ return u""
return self.sep
class Namespace(object):
"""A namespace object that can hold arbitrary attributes. It may be
- initialized from a dictionary or with keyword argments."""
+ initialized from a dictionary or with keyword arguments."""
- def __init__(*args, **kwargs):
+ def __init__(*args, **kwargs): # noqa: B902
self, args = args[0], args[1:]
self.__attrs = dict(*args, **kwargs)
def __getattribute__(self, name):
- if name == '_Namespace__attrs':
+ # __class__ is needed for the awaitable check in async mode
+ if name in {"_Namespace__attrs", "__class__"}:
return object.__getattribute__(self, name)
try:
return self.__attrs[name]
@@ -636,16 +714,24 @@ class Namespace(object):
self.__attrs[name] = value
def __repr__(self):
- return '<Namespace %r>' % self.__attrs
+ return "<Namespace %r>" % self.__attrs
# does this python version support async for in and async generators?
try:
- exec('async def _():\n async for _ in ():\n yield _')
+ exec("async def _():\n async for _ in ():\n yield _")
have_async_gen = True
except SyntaxError:
have_async_gen = False
-# Imported here because that's where it was in the past
-from markupsafe import Markup, escape, soft_unicode
+def soft_unicode(s):
+ from markupsafe import soft_unicode
+
+ warnings.warn(
+ "'jinja2.utils.soft_unicode' will be removed in version 3.0."
+ " Use 'markupsafe.soft_unicode' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return soft_unicode(s)
diff --git a/lib/spack/external/jinja2/visitor.py b/lib/spack/external/jinja2/visitor.py
index ba526dfac9..d1365bf10e 100644
--- a/lib/spack/external/jinja2/visitor.py
+++ b/lib/spack/external/jinja2/visitor.py
@@ -1,14 +1,8 @@
# -*- coding: utf-8 -*-
+"""API for traversing the AST nodes. Implemented by the compiler and
+meta introspection.
"""
- jinja2.visitor
- ~~~~~~~~~~~~~~
-
- This module implements a visitor for the nodes.
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD.
-"""
-from jinja2.nodes import Node
+from .nodes import Node
class NodeVisitor(object):
@@ -28,7 +22,7 @@ class NodeVisitor(object):
exists for this node. In that case the generic visit function is
used instead.
"""
- method = 'visit_' + node.__class__.__name__
+ method = "visit_" + node.__class__.__name__
return getattr(self, method, None)
def visit(self, node, *args, **kwargs):
diff --git a/lib/spack/external/jsonschema/README.rst b/lib/spack/external/jsonschema/README.rst
deleted file mode 100644
index 20c2fe6266..0000000000
--- a/lib/spack/external/jsonschema/README.rst
+++ /dev/null
@@ -1,104 +0,0 @@
-==========
-jsonschema
-==========
-
-``jsonschema`` is an implementation of `JSON Schema <http://json-schema.org>`_
-for Python (supporting 2.6+ including Python 3).
-
-.. code-block:: python
-
- >>> from jsonschema import validate
-
- >>> # A sample schema, like what we'd get from json.load()
- >>> schema = {
- ... "type" : "object",
- ... "properties" : {
- ... "price" : {"type" : "number"},
- ... "name" : {"type" : "string"},
- ... },
- ... }
-
- >>> # If no exception is raised by validate(), the instance is valid.
- >>> validate({"name" : "Eggs", "price" : 34.99}, schema)
-
- >>> validate(
- ... {"name" : "Eggs", "price" : "Invalid"}, schema
- ... ) # doctest: +IGNORE_EXCEPTION_DETAIL
- Traceback (most recent call last):
- ...
- ValidationError: 'Invalid' is not of type 'number'
-
-
-Features
---------
-
-* Full support for
- `Draft 3 <https://python-jsonschema.readthedocs.org/en/latest/validate/#jsonschema.Draft3Validator>`_
- **and** `Draft 4 <https://python-jsonschema.readthedocs.org/en/latest/validate/#jsonschema.Draft4Validator>`_
- of the schema.
-
-* `Lazy validation <https://python-jsonschema.readthedocs.org/en/latest/validate/#jsonschema.IValidator.iter_errors>`_
- that can iteratively report *all* validation errors.
-
-* Small and extensible
-
-* `Programmatic querying <https://python-jsonschema.readthedocs.org/en/latest/errors/#module-jsonschema>`_
- of which properties or items failed validation.
-
-
-Release Notes
--------------
-
-* A simple CLI was added for validation
-* Validation errors now keep full absolute paths and absolute schema paths in
- their ``absolute_path`` and ``absolute_schema_path`` attributes. The ``path``
- and ``schema_path`` attributes are deprecated in favor of ``relative_path``
- and ``relative_schema_path``\ .
-
-*Note:* Support for Python 3.2 was dropped in this release, and installation
-now uses setuptools.
-
-
-Running the Test Suite
-----------------------
-
-``jsonschema`` uses the wonderful `Tox <http://tox.readthedocs.org>`_ for its
-test suite. (It really is wonderful, if for some reason you haven't heard of
-it, you really should use it for your projects).
-
-Assuming you have ``tox`` installed (perhaps via ``pip install tox`` or your
-package manager), just run ``tox`` in the directory of your source checkout to
-run ``jsonschema``'s test suite on all of the versions of Python ``jsonschema``
-supports. Note that you'll need to have all of those versions installed in
-order to run the tests on each of them, otherwise ``tox`` will skip (and fail)
-the tests on that version.
-
-Of course you're also free to just run the tests on a single version with your
-favorite test runner. The tests live in the ``jsonschema.tests`` package.
-
-
-Community
----------
-
-There's a `mailing list <https://groups.google.com/forum/#!forum/jsonschema>`_
-for this implementation on Google Groups.
-
-Please join, and feel free to send questions there.
-
-
-Contributing
-------------
-
-I'm Julian Berman.
-
-``jsonschema`` is on `GitHub <http://github.com/Julian/jsonschema>`_.
-
-Get in touch, via GitHub or otherwise, if you've got something to contribute,
-it'd be most welcome!
-
-You can also generally find me on Freenode (nick: ``tos9``) in various
-channels, including ``#python``.
-
-If you feel overwhelmingly grateful, you can woo me with beer money on
-`Gittip <https://www.gittip.com/Julian/>`_ or via Google Wallet with the email
-in my GitHub profile.
diff --git a/lib/spack/external/jsonschema/__init__.py b/lib/spack/external/jsonschema/__init__.py
index 6c099f1d8b..6dfdb9419a 100644
--- a/lib/spack/external/jsonschema/__init__.py
+++ b/lib/spack/external/jsonschema/__init__.py
@@ -4,23 +4,34 @@ An implementation of JSON Schema for Python
The main functionality is provided by the validator classes for each of the
supported JSON Schema versions.
-Most commonly, :func:`validate` is the quickest way to simply validate a given
+Most commonly, `validate` is the quickest way to simply validate a given
instance under a schema, and will create a validator for you.
-
"""
from jsonschema.exceptions import (
ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError
)
from jsonschema._format import (
- FormatChecker, draft3_format_checker, draft4_format_checker,
+ FormatChecker,
+ draft3_format_checker,
+ draft4_format_checker,
+ draft6_format_checker,
+ draft7_format_checker,
)
+from jsonschema._types import TypeChecker
from jsonschema.validators import (
- Draft3Validator, Draft4Validator, RefResolver, validate
+ Draft3Validator,
+ Draft4Validator,
+ Draft6Validator,
+ Draft7Validator,
+ RefResolver,
+ validate,
)
-
-
-__version__ = "2.4.0"
-
-
-# flake8: noqa
+# try:
+# from importlib import metadata
+# except ImportError: # for Python<3.8
+# import importlib_metadata as metadata
+# __version__ = metadata.version("jsonschema")
+# set the version manually here, as we don't install dist-info or egg-info
+# files for vendored spack externals.
+__version__ = '3.2.0'
diff --git a/lib/spack/external/jsonschema/_format.py b/lib/spack/external/jsonschema/_format.py
index bb52d183ad..281a7cfcff 100644
--- a/lib/spack/external/jsonschema/_format.py
+++ b/lib/spack/external/jsonschema/_format.py
@@ -1,6 +1,7 @@
import datetime
import re
import socket
+import struct
from jsonschema.compat import str_types
from jsonschema.exceptions import FormatError
@@ -14,17 +15,19 @@ class FormatChecker(object):
validation. If validation is desired however, instances of this class can
be hooked into validators to enable format validation.
- :class:`FormatChecker` objects always return ``True`` when asked about
+ `FormatChecker` objects always return ``True`` when asked about
formats that they do not know how to validate.
To check a custom format using a function that takes an instance and
- returns a ``bool``, use the :meth:`FormatChecker.checks` or
- :meth:`FormatChecker.cls_checks` decorators.
+ returns a ``bool``, use the `FormatChecker.checks` or
+ `FormatChecker.cls_checks` decorators.
- :argument iterable formats: the known formats to validate. This argument
- can be used to limit which formats will be used
- during validation.
+ Arguments:
+ formats (~collections.Iterable):
+
+ The known formats to validate. This argument can be used to
+ limit which formats will be used during validation.
"""
checkers = {}
@@ -35,16 +38,27 @@ class FormatChecker(object):
else:
self.checkers = dict((k, self.checkers[k]) for k in formats)
+ def __repr__(self):
+ return "<FormatChecker checkers={}>".format(sorted(self.checkers))
+
def checks(self, format, raises=()):
"""
Register a decorated function as validating a new format.
- :argument str format: the format that the decorated function will check
- :argument Exception raises: the exception(s) raised by the decorated
- function when an invalid instance is found. The exception object
- will be accessible as the :attr:`ValidationError.cause` attribute
- of the resulting validation error.
+ Arguments:
+
+ format (str):
+
+ The format that the decorated function will check.
+
+ raises (Exception):
+ The exception(s) raised by the decorated function when an
+ invalid instance is found.
+
+ The exception object will be accessible as the
+ `jsonschema.exceptions.ValidationError.cause` attribute of the
+ resulting validation error.
"""
def _checks(func):
@@ -58,11 +72,20 @@ class FormatChecker(object):
"""
Check whether the instance conforms to the given format.
- :argument instance: the instance to check
- :type: any primitive type (str, number, bool)
- :argument str format: the format that instance should conform to
- :raises: :exc:`FormatError` if instance does not conform to format
+ Arguments:
+
+ instance (*any primitive type*, i.e. str, number, bool):
+
+ The instance to check
+
+ format (str):
+ The format that instance should conform to
+
+
+ Raises:
+
+ FormatError: if the instance does not conform to ``format``
"""
if format not in self.checkers:
@@ -83,11 +106,19 @@ class FormatChecker(object):
"""
Check whether the instance conforms to the given format.
- :argument instance: the instance to check
- :type: any primitive type (str, number, bool)
- :argument str format: the format that instance should conform to
- :rtype: bool
+ Arguments:
+
+ instance (*any primitive type*, i.e. str, number, bool):
+
+ The instance to check
+ format (str):
+
+ The format that instance should conform to
+
+ Returns:
+
+ bool: whether it conformed
"""
try:
@@ -98,25 +129,55 @@ class FormatChecker(object):
return True
-_draft_checkers = {"draft3": [], "draft4": []}
+draft3_format_checker = FormatChecker()
+draft4_format_checker = FormatChecker()
+draft6_format_checker = FormatChecker()
+draft7_format_checker = FormatChecker()
+
+_draft_checkers = dict(
+ draft3=draft3_format_checker,
+ draft4=draft4_format_checker,
+ draft6=draft6_format_checker,
+ draft7=draft7_format_checker,
+)
-def _checks_drafts(both=None, draft3=None, draft4=None, raises=()):
- draft3 = draft3 or both
- draft4 = draft4 or both
+
+def _checks_drafts(
+ name=None,
+ draft3=None,
+ draft4=None,
+ draft6=None,
+ draft7=None,
+ raises=(),
+):
+ draft3 = draft3 or name
+ draft4 = draft4 or name
+ draft6 = draft6 or name
+ draft7 = draft7 or name
def wrap(func):
if draft3:
- _draft_checkers["draft3"].append(draft3)
- func = FormatChecker.cls_checks(draft3, raises)(func)
+ func = _draft_checkers["draft3"].checks(draft3, raises)(func)
if draft4:
- _draft_checkers["draft4"].append(draft4)
- func = FormatChecker.cls_checks(draft4, raises)(func)
+ func = _draft_checkers["draft4"].checks(draft4, raises)(func)
+ if draft6:
+ func = _draft_checkers["draft6"].checks(draft6, raises)(func)
+ if draft7:
+ func = _draft_checkers["draft7"].checks(draft7, raises)(func)
+
+ # Oy. This is bad global state, but relied upon for now, until
+ # deprecation. See https://github.com/Julian/jsonschema/issues/519
+ # and test_format_checkers_come_with_defaults
+ FormatChecker.cls_checks(draft7 or draft6 or draft4 or draft3, raises)(
+ func,
+ )
return func
return wrap
-@_checks_drafts("email")
+@_checks_drafts(name="idn-email")
+@_checks_drafts(name="email")
def is_email(instance):
if not isinstance(instance, str_types):
return True
@@ -125,7 +186,10 @@ def is_email(instance):
_ipv4_re = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
-@_checks_drafts(draft3="ip-address", draft4="ipv4")
+
+@_checks_drafts(
+ draft3="ip-address", draft4="ipv4", draft6="ipv4", draft7="ipv4",
+)
def is_ipv4(instance):
if not isinstance(instance, str_types):
return True
@@ -135,7 +199,11 @@ def is_ipv4(instance):
if hasattr(socket, "inet_pton"):
- @_checks_drafts("ipv6", raises=socket.error)
+ # FIXME: Really this only should raise struct.error, but see the sadness
+ # that is https://twistedmatrix.com/trac/ticket/9409
+ @_checks_drafts(
+ name="ipv6", raises=(socket.error, struct.error, ValueError),
+ )
def is_ipv6(instance):
if not isinstance(instance, str_types):
return True
@@ -144,7 +212,13 @@ if hasattr(socket, "inet_pton"):
_host_name_re = re.compile(r"^[A-Za-z0-9][A-Za-z0-9\.\-]{1,255}$")
-@_checks_drafts(draft3="host-name", draft4="hostname")
+
+@_checks_drafts(
+ draft3="host-name",
+ draft4="hostname",
+ draft6="hostname",
+ draft7="hostname",
+)
def is_host_name(instance):
if not isinstance(instance, str_types):
return True
@@ -158,46 +232,103 @@ def is_host_name(instance):
try:
- import rfc3987
+ # The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
+ import idna
except ImportError:
pass
else:
- @_checks_drafts("uri", raises=ValueError)
- def is_uri(instance):
+ @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
+ def is_idn_host_name(instance):
if not isinstance(instance, str_types):
return True
- return rfc3987.parse(instance, rule="URI")
+ idna.encode(instance)
+ return True
try:
- import strict_rfc3339
+ import rfc3987
except ImportError:
try:
- import isodate
+ from rfc3986_validator import validate_rfc3986
except ImportError:
pass
else:
- @_checks_drafts("date-time", raises=(ValueError, isodate.ISO8601Error))
- def is_date(instance):
+ @_checks_drafts(name="uri")
+ def is_uri(instance):
if not isinstance(instance, str_types):
return True
- return isodate.parse_datetime(instance)
-else:
- @_checks_drafts("date-time")
- def is_date(instance):
+ return validate_rfc3986(instance, rule="URI")
+
+ @_checks_drafts(
+ draft6="uri-reference",
+ draft7="uri-reference",
+ raises=ValueError,
+ )
+ def is_uri_reference(instance):
if not isinstance(instance, str_types):
return True
- return strict_rfc3339.validate_rfc3339(instance)
+ return validate_rfc3986(instance, rule="URI_reference")
+
+else:
+ @_checks_drafts(draft7="iri", raises=ValueError)
+ def is_iri(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return rfc3987.parse(instance, rule="IRI")
+
+ @_checks_drafts(draft7="iri-reference", raises=ValueError)
+ def is_iri_reference(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return rfc3987.parse(instance, rule="IRI_reference")
+
+ @_checks_drafts(name="uri", raises=ValueError)
+ def is_uri(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return rfc3987.parse(instance, rule="URI")
+
+ @_checks_drafts(
+ draft6="uri-reference",
+ draft7="uri-reference",
+ raises=ValueError,
+ )
+ def is_uri_reference(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return rfc3987.parse(instance, rule="URI_reference")
+
+
+try:
+ from strict_rfc3339 import validate_rfc3339
+except ImportError:
+ try:
+ from rfc3339_validator import validate_rfc3339
+ except ImportError:
+ validate_rfc3339 = None
+
+if validate_rfc3339:
+ @_checks_drafts(name="date-time")
+ def is_datetime(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return validate_rfc3339(instance)
+
+ @_checks_drafts(draft7="time")
+ def is_time(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return is_datetime("1970-01-01T" + instance)
-@_checks_drafts("regex", raises=re.error)
+@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance):
if not isinstance(instance, str_types):
return True
return re.compile(instance)
-@_checks_drafts(draft3="date", raises=ValueError)
+@_checks_drafts(draft3="date", draft7="date", raises=ValueError)
def is_date(instance):
if not isinstance(instance, str_types):
return True
@@ -205,7 +336,7 @@ def is_date(instance):
@_checks_drafts(draft3="time", raises=ValueError)
-def is_time(instance):
+def is_draft3_time(instance):
if not isinstance(instance, str_types):
return True
return datetime.datetime.strptime(instance, "%H:%M:%S")
@@ -219,7 +350,6 @@ else:
def is_css_color_code(instance):
return webcolors.normalize_hex(instance)
-
@_checks_drafts(draft3="color", raises=(ValueError, TypeError))
def is_css21_color(instance):
if (
@@ -229,12 +359,67 @@ else:
return True
return is_css_color_code(instance)
-
def is_css3_color(instance):
if instance.lower() in webcolors.css3_names_to_hex:
return True
return is_css_color_code(instance)
-draft3_format_checker = FormatChecker(_draft_checkers["draft3"])
-draft4_format_checker = FormatChecker(_draft_checkers["draft4"])
+try:
+ import jsonpointer
+except ImportError:
+ pass
+else:
+ @_checks_drafts(
+ draft6="json-pointer",
+ draft7="json-pointer",
+ raises=jsonpointer.JsonPointerException,
+ )
+ def is_json_pointer(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return jsonpointer.JsonPointer(instance)
+
+ # TODO: I don't want to maintain this, so it
+ # needs to go either into jsonpointer (pending
+ # https://github.com/stefankoegl/python-json-pointer/issues/34) or
+ # into a new external library.
+ @_checks_drafts(
+ draft7="relative-json-pointer",
+ raises=jsonpointer.JsonPointerException,
+ )
+ def is_relative_json_pointer(instance):
+ # Definition taken from:
+ # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
+ if not isinstance(instance, str_types):
+ return True
+ non_negative_integer, rest = [], ""
+ for i, character in enumerate(instance):
+ if character.isdigit():
+ non_negative_integer.append(character)
+ continue
+
+ if not non_negative_integer:
+ return False
+
+ rest = instance[i:]
+ break
+ return (rest == "#") or jsonpointer.JsonPointer(rest)
+
+
+try:
+ import uritemplate.exceptions
+except ImportError:
+ pass
+else:
+ @_checks_drafts(
+ draft6="uri-template",
+ draft7="uri-template",
+ raises=uritemplate.exceptions.InvalidTemplate,
+ )
+ def is_uri_template(
+ instance,
+ template_validator=uritemplate.Validator().force_balanced_braces(),
+ ):
+ template = uritemplate.URITemplate(instance)
+ return template_validator.validate(template)
diff --git a/lib/spack/external/jsonschema/_legacy_validators.py b/lib/spack/external/jsonschema/_legacy_validators.py
new file mode 100644
index 0000000000..264ff7d713
--- /dev/null
+++ b/lib/spack/external/jsonschema/_legacy_validators.py
@@ -0,0 +1,141 @@
+from jsonschema import _utils
+from jsonschema.compat import iteritems
+from jsonschema.exceptions import ValidationError
+
+
+def dependencies_draft3(validator, dependencies, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ for property, dependency in iteritems(dependencies):
+ if property not in instance:
+ continue
+
+ if validator.is_type(dependency, "object"):
+ for error in validator.descend(
+ instance, dependency, schema_path=property,
+ ):
+ yield error
+ elif validator.is_type(dependency, "string"):
+ if dependency not in instance:
+ yield ValidationError(
+ "%r is a dependency of %r" % (dependency, property)
+ )
+ else:
+ for each in dependency:
+ if each not in instance:
+ message = "%r is a dependency of %r"
+ yield ValidationError(message % (each, property))
+
+
+def disallow_draft3(validator, disallow, instance, schema):
+ for disallowed in _utils.ensure_list(disallow):
+ if validator.is_valid(instance, {"type": [disallowed]}):
+ yield ValidationError(
+ "%r is disallowed for %r" % (disallowed, instance)
+ )
+
+
+def extends_draft3(validator, extends, instance, schema):
+ if validator.is_type(extends, "object"):
+ for error in validator.descend(instance, extends):
+ yield error
+ return
+ for index, subschema in enumerate(extends):
+ for error in validator.descend(instance, subschema, schema_path=index):
+ yield error
+
+
+def items_draft3_draft4(validator, items, instance, schema):
+ if not validator.is_type(instance, "array"):
+ return
+
+ if validator.is_type(items, "object"):
+ for index, item in enumerate(instance):
+ for error in validator.descend(item, items, path=index):
+ yield error
+ else:
+ for (index, item), subschema in zip(enumerate(instance), items):
+ for error in validator.descend(
+ item, subschema, path=index, schema_path=index,
+ ):
+ yield error
+
+
+def minimum_draft3_draft4(validator, minimum, instance, schema):
+ if not validator.is_type(instance, "number"):
+ return
+
+ if schema.get("exclusiveMinimum", False):
+ failed = instance <= minimum
+ cmp = "less than or equal to"
+ else:
+ failed = instance < minimum
+ cmp = "less than"
+
+ if failed:
+ yield ValidationError(
+ "%r is %s the minimum of %r" % (instance, cmp, minimum)
+ )
+
+
+def maximum_draft3_draft4(validator, maximum, instance, schema):
+ if not validator.is_type(instance, "number"):
+ return
+
+ if schema.get("exclusiveMaximum", False):
+ failed = instance >= maximum
+ cmp = "greater than or equal to"
+ else:
+ failed = instance > maximum
+ cmp = "greater than"
+
+ if failed:
+ yield ValidationError(
+ "%r is %s the maximum of %r" % (instance, cmp, maximum)
+ )
+
+
+def properties_draft3(validator, properties, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ for property, subschema in iteritems(properties):
+ if property in instance:
+ for error in validator.descend(
+ instance[property],
+ subschema,
+ path=property,
+ schema_path=property,
+ ):
+ yield error
+ elif subschema.get("required", False):
+ error = ValidationError("%r is a required property" % property)
+ error._set(
+ validator="required",
+ validator_value=subschema["required"],
+ instance=instance,
+ schema=schema,
+ )
+ error.path.appendleft(property)
+ error.schema_path.extend([property, "required"])
+ yield error
+
+
+def type_draft3(validator, types, instance, schema):
+ types = _utils.ensure_list(types)
+
+ all_errors = []
+ for index, type in enumerate(types):
+ if validator.is_type(type, "object"):
+ errors = list(validator.descend(instance, type, schema_path=index))
+ if not errors:
+ return
+ all_errors.extend(errors)
+ else:
+ if validator.is_type(instance, type):
+ return
+ else:
+ yield ValidationError(
+ _utils.types_msg(instance, types), context=all_errors,
+ )
diff --git a/lib/spack/external/jsonschema/_types.py b/lib/spack/external/jsonschema/_types.py
new file mode 100644
index 0000000000..a71a4e34bd
--- /dev/null
+++ b/lib/spack/external/jsonschema/_types.py
@@ -0,0 +1,188 @@
+import numbers
+
+from pyrsistent import pmap
+import attr
+
+from jsonschema.compat import int_types, str_types
+from jsonschema.exceptions import UndefinedTypeCheck
+
+
+def is_array(checker, instance):
+ return isinstance(instance, list)
+
+
+def is_bool(checker, instance):
+ return isinstance(instance, bool)
+
+
+def is_integer(checker, instance):
+ # bool inherits from int, so ensure bools aren't reported as ints
+ if isinstance(instance, bool):
+ return False
+ return isinstance(instance, int_types)
+
+
+def is_null(checker, instance):
+ return instance is None
+
+
+def is_number(checker, instance):
+ # bool inherits from int, so ensure bools aren't reported as ints
+ if isinstance(instance, bool):
+ return False
+ return isinstance(instance, numbers.Number)
+
+
+def is_object(checker, instance):
+ return isinstance(instance, dict)
+
+
+def is_string(checker, instance):
+ return isinstance(instance, str_types)
+
+
+def is_any(checker, instance):
+ return True
+
+
+@attr.s(frozen=True)
+class TypeChecker(object):
+ """
+ A ``type`` property checker.
+
+ A `TypeChecker` performs type checking for an `IValidator`. Type
+ checks to perform are updated using `TypeChecker.redefine` or
+ `TypeChecker.redefine_many` and removed via `TypeChecker.remove`.
+ Each of these return a new `TypeChecker` object.
+
+ Arguments:
+
+ type_checkers (dict):
+
+ The initial mapping of types to their checking functions.
+ """
+ _type_checkers = attr.ib(default=pmap(), converter=pmap)
+
+ def is_type(self, instance, type):
+ """
+ Check if the instance is of the appropriate type.
+
+ Arguments:
+
+ instance (object):
+
+ The instance to check
+
+ type (str):
+
+ The name of the type that is expected.
+
+ Returns:
+
+ bool: Whether it conformed.
+
+
+ Raises:
+
+ `jsonschema.exceptions.UndefinedTypeCheck`:
+ if type is unknown to this object.
+ """
+ try:
+ fn = self._type_checkers[type]
+ except KeyError:
+ raise UndefinedTypeCheck(type)
+
+ return fn(self, instance)
+
+ def redefine(self, type, fn):
+ """
+ Produce a new checker with the given type redefined.
+
+ Arguments:
+
+ type (str):
+
+ The name of the type to check.
+
+ fn (collections.Callable):
+
+ A function taking exactly two parameters - the type
+ checker calling the function and the instance to check.
+ The function should return true if instance is of this
+ type and false otherwise.
+
+ Returns:
+
+ A new `TypeChecker` instance.
+ """
+ return self.redefine_many({type: fn})
+
+ def redefine_many(self, definitions=()):
+ """
+ Produce a new checker with the given types redefined.
+
+ Arguments:
+
+ definitions (dict):
+
+ A dictionary mapping types to their checking functions.
+
+ Returns:
+
+ A new `TypeChecker` instance.
+ """
+ return attr.evolve(
+ self, type_checkers=self._type_checkers.update(definitions),
+ )
+
+ def remove(self, *types):
+ """
+ Produce a new checker with the given types forgotten.
+
+ Arguments:
+
+ types (~collections.Iterable):
+
+ the names of the types to remove.
+
+ Returns:
+
+ A new `TypeChecker` instance
+
+ Raises:
+
+ `jsonschema.exceptions.UndefinedTypeCheck`:
+
+ if any given type is unknown to this object
+ """
+
+ checkers = self._type_checkers
+ for each in types:
+ try:
+ checkers = checkers.remove(each)
+ except KeyError:
+ raise UndefinedTypeCheck(each)
+ return attr.evolve(self, type_checkers=checkers)
+
+
+draft3_type_checker = TypeChecker(
+ {
+ u"any": is_any,
+ u"array": is_array,
+ u"boolean": is_bool,
+ u"integer": is_integer,
+ u"object": is_object,
+ u"null": is_null,
+ u"number": is_number,
+ u"string": is_string,
+ },
+)
+draft4_type_checker = draft3_type_checker.remove(u"any")
+draft6_type_checker = draft4_type_checker.redefine(
+ u"integer",
+ lambda checker, instance: (
+ is_integer(checker, instance) or
+ isinstance(instance, float) and instance.is_integer()
+ ),
+)
+draft7_type_checker = draft6_type_checker
diff --git a/lib/spack/external/jsonschema/_utils.py b/lib/spack/external/jsonschema/_utils.py
index 2262f3305d..ceb880198d 100644
--- a/lib/spack/external/jsonschema/_utils.py
+++ b/lib/spack/external/jsonschema/_utils.py
@@ -3,13 +3,12 @@ import json
import pkgutil
import re
-from jsonschema.compat import str_types, MutableMapping, urlsplit
+from jsonschema.compat import MutableMapping, str_types, urlsplit
class URIDict(MutableMapping):
"""
Dictionary which uses normalized URIs as keys.
-
"""
def normalize(self, uri):
@@ -41,7 +40,6 @@ class URIDict(MutableMapping):
class Unset(object):
"""
An as-of-yet unset attribute or unprovided default parameter.
-
"""
def __repr__(self):
@@ -51,17 +49,15 @@ class Unset(object):
def load_schema(name):
"""
Load a schema from ./schemas/``name``.json and return it.
-
"""
- data = pkgutil.get_data(__package__, "schemas/{0}.json".format(name))
+ data = pkgutil.get_data("jsonschema", "schemas/{0}.json".format(name))
return json.loads(data.decode("utf-8"))
def indent(string, times=1):
"""
- A dumb version of :func:`textwrap.indent` from Python 3.3.
-
+ A dumb version of `textwrap.indent` from Python 3.3.
"""
return "\n".join(" " * (4 * times) + line for line in string.splitlines())
@@ -73,8 +69,11 @@ def format_as_index(indices):
For example, [1, 2, "foo"] -> [1][2]["foo"]
- :type indices: sequence
+ Arguments:
+ indices (sequence):
+
+ The indices to format.
"""
if not indices:
@@ -90,7 +89,6 @@ def find_additional_properties(instance, schema):
/ or ``patternProperties``.
Assumes ``instance`` is dict-like already.
-
"""
properties = schema.get("properties", {})
@@ -105,7 +103,6 @@ def find_additional_properties(instance, schema):
def extras_msg(extras):
"""
Create an error message for extra items or properties.
-
"""
if len(extras) == 1:
@@ -123,7 +120,6 @@ def types_msg(instance, types):
be considered to be a description of that object and used as its type.
Otherwise the message is simply the reprs of the given ``types``.
-
"""
reprs = []
@@ -143,7 +139,6 @@ def flatten(suitable_for_isinstance):
* an arbitrary nested tree of tuples
Return a flattened tuple of the given argument.
-
"""
types = set()
@@ -163,7 +158,6 @@ def ensure_list(thing):
Wrap ``thing`` in a list if it's a single str.
Otherwise, return it unchanged.
-
"""
if isinstance(thing, str_types):
@@ -171,10 +165,16 @@ def ensure_list(thing):
return thing
+def equal(one, two):
+ """
+ Check if two things are equal, but evade booleans and ints being equal.
+ """
+ return unbool(one) == unbool(two)
+
+
def unbool(element, true=object(), false=object()):
"""
A hack to make True and 1 and False and 0 unique for ``uniq``.
-
"""
if element is True:
@@ -191,7 +191,6 @@ def uniq(container):
Successively tries first to rely that the elements are hashable, then
falls back on them being sortable, and finally falls back on brute
force.
-
"""
try:
diff --git a/lib/spack/external/jsonschema/_validators.py b/lib/spack/external/jsonschema/_validators.py
index c6e801ccb2..179fec09a9 100644
--- a/lib/spack/external/jsonschema/_validators.py
+++ b/lib/spack/external/jsonschema/_validators.py
@@ -1,13 +1,18 @@
import re
-from jsonschema import _utils
+from jsonschema._utils import (
+ ensure_list,
+ equal,
+ extras_msg,
+ find_additional_properties,
+ types_msg,
+ unbool,
+ uniq,
+)
from jsonschema.exceptions import FormatError, ValidationError
from jsonschema.compat import iteritems
-FLOAT_TOLERANCE = 10 ** -15
-
-
def patternProperties(validator, patternProperties, instance, schema):
if not validator.is_type(instance, "object"):
return
@@ -21,35 +26,60 @@ def patternProperties(validator, patternProperties, instance, schema):
yield error
+def propertyNames(validator, propertyNames, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ for property in instance:
+ for error in validator.descend(
+ instance=property,
+ schema=propertyNames,
+ ):
+ yield error
+
+
def additionalProperties(validator, aP, instance, schema):
if not validator.is_type(instance, "object"):
return
- extras = set(_utils.find_additional_properties(instance, schema))
+ extras = set(find_additional_properties(instance, schema))
if validator.is_type(aP, "object"):
for extra in extras:
for error in validator.descend(instance[extra], aP, path=extra):
yield error
elif not aP and extras:
- error = "Additional properties are not allowed (%s %s unexpected)"
- yield ValidationError(error % _utils.extras_msg(extras))
+ if "patternProperties" in schema:
+ patterns = sorted(schema["patternProperties"])
+ if len(extras) == 1:
+ verb = "does"
+ else:
+ verb = "do"
+ error = "%s %s not match any of the regexes: %s" % (
+ ", ".join(map(repr, sorted(extras))),
+ verb,
+ ", ".join(map(repr, patterns)),
+ )
+ yield ValidationError(error)
+ else:
+ error = "Additional properties are not allowed (%s %s unexpected)"
+ yield ValidationError(error % extras_msg(extras))
def items(validator, items, instance, schema):
if not validator.is_type(instance, "array"):
return
- if validator.is_type(items, "object"):
- for index, item in enumerate(instance):
- for error in validator.descend(item, items, path=index):
- yield error
- else:
+ if validator.is_type(items, "array"):
for (index, item), subschema in zip(enumerate(instance), items):
for error in validator.descend(
item, subschema, path=index, schema_path=index,
):
yield error
+ else:
+ for index, item in enumerate(instance):
+ for error in validator.descend(item, items, path=index):
+ yield error
def additionalItems(validator, aI, instance, schema):
@@ -68,41 +98,66 @@ def additionalItems(validator, aI, instance, schema):
error = "Additional items are not allowed (%s %s unexpected)"
yield ValidationError(
error %
- _utils.extras_msg(instance[len(schema.get("items", [])):])
+ extras_msg(instance[len(schema.get("items", [])):])
)
-def minimum(validator, minimum, instance, schema):
+def const(validator, const, instance, schema):
+ if not equal(instance, const):
+ yield ValidationError("%r was expected" % (const,))
+
+
+def contains(validator, contains, instance, schema):
+ if not validator.is_type(instance, "array"):
+ return
+
+ if not any(validator.is_valid(element, contains) for element in instance):
+ yield ValidationError(
+ "None of %r are valid under the given schema" % (instance,)
+ )
+
+
+def exclusiveMinimum(validator, minimum, instance, schema):
if not validator.is_type(instance, "number"):
return
- if schema.get("exclusiveMinimum", False):
- failed = float(instance) <= minimum
- cmp = "less than or equal to"
- else:
- failed = float(instance) < minimum
- cmp = "less than"
+ if instance <= minimum:
+ yield ValidationError(
+ "%r is less than or equal to the minimum of %r" % (
+ instance, minimum,
+ ),
+ )
- if failed:
+
+def exclusiveMaximum(validator, maximum, instance, schema):
+ if not validator.is_type(instance, "number"):
+ return
+
+ if instance >= maximum:
yield ValidationError(
- "%r is %s the minimum of %r" % (instance, cmp, minimum)
+ "%r is greater than or equal to the maximum of %r" % (
+ instance, maximum,
+ ),
)
-def maximum(validator, maximum, instance, schema):
+def minimum(validator, minimum, instance, schema):
if not validator.is_type(instance, "number"):
return
- if schema.get("exclusiveMaximum", False):
- failed = instance >= maximum
- cmp = "greater than or equal to"
- else:
- failed = instance > maximum
- cmp = "greater than"
+ if instance < minimum:
+ yield ValidationError(
+ "%r is less than the minimum of %r" % (instance, minimum)
+ )
- if failed:
+
+def maximum(validator, maximum, instance, schema):
+ if not validator.is_type(instance, "number"):
+ return
+
+ if instance > maximum:
yield ValidationError(
- "%r is %s the maximum of %r" % (instance, cmp, maximum)
+ "%r is greater than the maximum of %r" % (instance, maximum)
)
@@ -111,8 +166,8 @@ def multipleOf(validator, dB, instance, schema):
return
if isinstance(dB, float):
- mod = instance % dB
- failed = (mod > FLOAT_TOLERANCE) and (dB - mod) > FLOAT_TOLERANCE
+ quotient = instance / dB
+ failed = int(quotient) != quotient
else:
failed = instance % dB
@@ -134,9 +189,9 @@ def uniqueItems(validator, uI, instance, schema):
if (
uI and
validator.is_type(instance, "array") and
- not _utils.uniq(instance)
+ not uniq(instance)
):
- yield ValidationError("%r has non-unique elements" % instance)
+ yield ValidationError("%r has non-unique elements" % (instance,))
def pattern(validator, patrn, instance, schema):
@@ -173,104 +228,52 @@ def dependencies(validator, dependencies, instance, schema):
if property not in instance:
continue
- if validator.is_type(dependency, "object"):
+ if validator.is_type(dependency, "array"):
+ for each in dependency:
+ if each not in instance:
+ message = "%r is a dependency of %r"
+ yield ValidationError(message % (each, property))
+ else:
for error in validator.descend(
instance, dependency, schema_path=property,
):
yield error
- else:
- dependencies = _utils.ensure_list(dependency)
- for dependency in dependencies:
- if dependency not in instance:
- yield ValidationError(
- "%r is a dependency of %r" % (dependency, property)
- )
def enum(validator, enums, instance, schema):
- if instance not in enums:
+ if instance == 0 or instance == 1:
+ unbooled = unbool(instance)
+ if all(unbooled != unbool(each) for each in enums):
+ yield ValidationError("%r is not one of %r" % (instance, enums))
+ elif instance not in enums:
yield ValidationError("%r is not one of %r" % (instance, enums))
def ref(validator, ref, instance, schema):
- with validator.resolver.resolving(ref) as resolved:
- for error in validator.descend(instance, resolved):
- yield error
-
-
-def type_draft3(validator, types, instance, schema):
- types = _utils.ensure_list(types)
-
- all_errors = []
- for index, type in enumerate(types):
- if type == "any":
- return
- if validator.is_type(type, "object"):
- errors = list(validator.descend(instance, type, schema_path=index))
- if not errors:
- return
- all_errors.extend(errors)
- else:
- if validator.is_type(instance, type):
- return
+ resolve = getattr(validator.resolver, "resolve", None)
+ if resolve is None:
+ with validator.resolver.resolving(ref) as resolved:
+ for error in validator.descend(instance, resolved):
+ yield error
else:
- yield ValidationError(
- _utils.types_msg(instance, types), context=all_errors,
- )
-
-
-def properties_draft3(validator, properties, instance, schema):
- if not validator.is_type(instance, "object"):
- return
+ scope, resolved = validator.resolver.resolve(ref)
+ validator.resolver.push_scope(scope)
- for property, subschema in iteritems(properties):
- if property in instance:
- for error in validator.descend(
- instance[property],
- subschema,
- path=property,
- schema_path=property,
- ):
+ try:
+ for error in validator.descend(instance, resolved):
yield error
- elif subschema.get("required", False):
- error = ValidationError("%r is a required property" % property)
- error._set(
- validator="required",
- validator_value=subschema["required"],
- instance=instance,
- schema=schema,
- )
- error.path.appendleft(property)
- error.schema_path.extend([property, "required"])
- yield error
-
-
-def disallow_draft3(validator, disallow, instance, schema):
- for disallowed in _utils.ensure_list(disallow):
- if validator.is_valid(instance, {"type" : [disallowed]}):
- yield ValidationError(
- "%r is disallowed for %r" % (disallowed, instance)
- )
+ finally:
+ validator.resolver.pop_scope()
-def extends_draft3(validator, extends, instance, schema):
- if validator.is_type(extends, "object"):
- for error in validator.descend(instance, extends):
- yield error
- return
- for index, subschema in enumerate(extends):
- for error in validator.descend(instance, subschema, schema_path=index):
- yield error
-
-
-def type_draft4(validator, types, instance, schema):
- types = _utils.ensure_list(types)
+def type(validator, types, instance, schema):
+ types = ensure_list(types)
if not any(validator.is_type(instance, type) for type in types):
- yield ValidationError(_utils.types_msg(instance, types))
+ yield ValidationError(types_msg(instance, types))
-def properties_draft4(validator, properties, instance, schema):
+def properties(validator, properties, instance, schema):
if not validator.is_type(instance, "object"):
return
@@ -285,7 +288,7 @@ def properties_draft4(validator, properties, instance, schema):
yield error
-def required_draft4(validator, required, instance, schema):
+def required(validator, required, instance, schema):
if not validator.is_type(instance, "object"):
return
for property in required:
@@ -293,33 +296,31 @@ def required_draft4(validator, required, instance, schema):
yield ValidationError("%r is a required property" % property)
-def minProperties_draft4(validator, mP, instance, schema):
+def minProperties(validator, mP, instance, schema):
if validator.is_type(instance, "object") and len(instance) < mP:
yield ValidationError(
"%r does not have enough properties" % (instance,)
)
-def maxProperties_draft4(validator, mP, instance, schema):
+def maxProperties(validator, mP, instance, schema):
if not validator.is_type(instance, "object"):
return
if validator.is_type(instance, "object") and len(instance) > mP:
yield ValidationError("%r has too many properties" % (instance,))
-def allOf_draft4(validator, allOf, instance, schema):
+def allOf(validator, allOf, instance, schema):
for index, subschema in enumerate(allOf):
for error in validator.descend(instance, subschema, schema_path=index):
yield error
-def oneOf_draft4(validator, oneOf, instance, schema):
- subschemas = enumerate(oneOf)
+def anyOf(validator, anyOf, instance, schema):
all_errors = []
- for index, subschema in subschemas:
+ for index, subschema in enumerate(anyOf):
errs = list(validator.descend(instance, subschema, schema_path=index))
if not errs:
- first_valid = subschema
break
all_errors.extend(errs)
else:
@@ -328,20 +329,14 @@ def oneOf_draft4(validator, oneOf, instance, schema):
context=all_errors,
)
- more_valid = [s for i, s in subschemas if validator.is_valid(instance, s)]
- if more_valid:
- more_valid.append(first_valid)
- reprs = ", ".join(repr(schema) for schema in more_valid)
- yield ValidationError(
- "%r is valid under each of %s" % (instance, reprs)
- )
-
-def anyOf_draft4(validator, anyOf, instance, schema):
+def oneOf(validator, oneOf, instance, schema):
+ subschemas = enumerate(oneOf)
all_errors = []
- for index, subschema in enumerate(anyOf):
+ for index, subschema in subschemas:
errs = list(validator.descend(instance, subschema, schema_path=index))
if not errs:
+ first_valid = subschema
break
all_errors.extend(errs)
else:
@@ -350,9 +345,29 @@ def anyOf_draft4(validator, anyOf, instance, schema):
context=all_errors,
)
+ more_valid = [s for i, s in subschemas if validator.is_valid(instance, s)]
+ if more_valid:
+ more_valid.append(first_valid)
+ reprs = ", ".join(repr(schema) for schema in more_valid)
+ yield ValidationError(
+ "%r is valid under each of %s" % (instance, reprs)
+ )
+
-def not_draft4(validator, not_schema, instance, schema):
+def not_(validator, not_schema, instance, schema):
if validator.is_valid(instance, not_schema):
yield ValidationError(
"%r is not allowed for %r" % (not_schema, instance)
)
+
+
+def if_(validator, if_schema, instance, schema):
+ if validator.is_valid(instance, if_schema):
+ if u"then" in schema:
+ then = schema[u"then"]
+ for error in validator.descend(instance, then, schema_path="then"):
+ yield error
+ elif u"else" in schema:
+ else_ = schema[u"else"]
+ for error in validator.descend(instance, else_, schema_path="else"):
+ yield error
diff --git a/lib/spack/external/jsonschema/cli.py b/lib/spack/external/jsonschema/cli.py
index 0126564f46..ab3335b27c 100644
--- a/lib/spack/external/jsonschema/cli.py
+++ b/lib/spack/external/jsonschema/cli.py
@@ -1,8 +1,12 @@
+"""
+The ``jsonschema`` command line.
+"""
from __future__ import absolute_import
import argparse
import json
import sys
+from jsonschema import __version__
from jsonschema._reflect import namedAny
from jsonschema.validators import validator_for
@@ -26,26 +30,37 @@ parser.add_argument(
action="append",
dest="instances",
type=_json_file,
- help="a path to a JSON instance to validate "
- "(may be specified multiple times)",
+ help=(
+ "a path to a JSON instance (i.e. filename.json) "
+ "to validate (may be specified multiple times)"
+ ),
)
parser.add_argument(
"-F", "--error-format",
default="{error.instance}: {error.message}\n",
- help="the format to use for each error output message, specified in "
- "a form suitable for passing to str.format, which will be called "
- "with 'error' for each error",
+ help=(
+ "the format to use for each error output message, specified in "
+ "a form suitable for passing to str.format, which will be called "
+ "with 'error' for each error"
+ ),
)
parser.add_argument(
"-V", "--validator",
type=_namedAnyWithDefault,
- help="the fully qualified object name of a validator to use, or, for "
- "validators that are registered with jsonschema, simply the name "
- "of the class.",
+ help=(
+ "the fully qualified object name of a validator to use, or, for "
+ "validators that are registered with jsonschema, simply the name "
+ "of the class."
+ ),
+)
+parser.add_argument(
+ "--version",
+ action="version",
+ version=__version__,
)
parser.add_argument(
"schema",
- help="the JSON Schema to validate with",
+ help="the JSON Schema to validate with (i.e. schema.json)",
type=_json_file,
)
@@ -64,6 +79,9 @@ def main(args=sys.argv[1:]):
def run(arguments, stdout=sys.stdout, stderr=sys.stderr):
error_format = arguments["error_format"]
validator = arguments["validator"](schema=arguments["schema"])
+
+ validator.check_schema(arguments["schema"])
+
errored = False
for instance in arguments["instances"] or ():
for error in validator.iter_errors(instance):
diff --git a/lib/spack/external/jsonschema/compat.py b/lib/spack/external/jsonschema/compat.py
index 6ca49ab6be..47e0980455 100644
--- a/lib/spack/external/jsonschema/compat.py
+++ b/lib/spack/external/jsonschema/compat.py
@@ -1,52 +1,54 @@
-from __future__ import unicode_literals
-import sys
+"""
+Python 2/3 compatibility helpers.
+
+Note: This module is *not* public API.
+"""
+import contextlib
import operator
+import sys
+
try:
- from collections import MutableMapping, Sequence # noqa
-except ImportError:
from collections.abc import MutableMapping, Sequence # noqa
+except ImportError:
+ from collections import MutableMapping, Sequence # noqa
PY3 = sys.version_info[0] >= 3
if PY3:
zip = zip
- from io import StringIO
+ from functools import lru_cache
+ from io import StringIO as NativeIO
from urllib.parse import (
- unquote, urljoin, urlunsplit, SplitResult, urlsplit as _urlsplit
+ unquote, urljoin, urlunsplit, SplitResult, urlsplit
)
- from urllib.request import urlopen
+ from urllib.request import pathname2url, urlopen
str_types = str,
int_types = int,
iteritems = operator.methodcaller("items")
else:
from itertools import izip as zip # noqa
- from StringIO import StringIO
- from urlparse import (
- urljoin, urlunsplit, SplitResult, urlsplit as _urlsplit # noqa
- )
- from urllib import unquote # noqa
- from urllib2 import urlopen # noqa
+ from io import BytesIO as NativeIO
+ from urlparse import urljoin, urlunsplit, SplitResult, urlsplit
+ from urllib import pathname2url, unquote # noqa
+ import urllib2 # noqa
+ def urlopen(*args, **kwargs):
+ return contextlib.closing(urllib2.urlopen(*args, **kwargs))
+
str_types = basestring
int_types = int, long
iteritems = operator.methodcaller("iteritems")
-
-# On python < 3.3 fragments are not handled properly with unknown schemes
-def urlsplit(url):
- scheme, netloc, path, query, fragment = _urlsplit(url)
- if "#" in path:
- path, fragment = path.split("#", 1)
- return SplitResult(scheme, netloc, path, query, fragment)
+ from functools32 import lru_cache
def urldefrag(url):
if "#" in url:
s, n, p, q, frag = urlsplit(url)
- defrag = urlunsplit((s, n, p, q, ''))
+ defrag = urlunsplit((s, n, p, q, ""))
else:
defrag = url
- frag = ''
+ frag = ""
return defrag, frag
diff --git a/lib/spack/external/jsonschema/exceptions.py b/lib/spack/external/jsonschema/exceptions.py
index 478e59c531..691dcffe6c 100644
--- a/lib/spack/external/jsonschema/exceptions.py
+++ b/lib/spack/external/jsonschema/exceptions.py
@@ -1,8 +1,13 @@
+"""
+Validation errors, and some surrounding helpers.
+"""
from collections import defaultdict, deque
import itertools
import pprint
import textwrap
+import attr
+
from jsonschema import _utils
from jsonschema.compat import PY3, iteritems
@@ -27,6 +32,18 @@ class _Error(Exception):
schema_path=(),
parent=None,
):
+ super(_Error, self).__init__(
+ message,
+ validator,
+ path,
+ cause,
+ context,
+ validator_value,
+ instance,
+ schema,
+ schema_path,
+ parent,
+ )
self.message = message
self.path = self.relative_path = deque(path)
self.schema_path = self.relative_schema_path = deque(schema_path)
@@ -44,9 +61,6 @@ class _Error(Exception):
def __repr__(self):
return "<%s: %r>" % (self.__class__.__name__, self.message)
- def __str__(self):
- return unicode(self).encode("utf-8")
-
def __unicode__(self):
essential_for_verbose = (
self.validator, self.validator_value, self.instance, self.schema,
@@ -58,22 +72,27 @@ class _Error(Exception):
pinstance = pprint.pformat(self.instance, width=72)
return self.message + textwrap.dedent("""
- Failed validating %r in schema%s:
+ Failed validating %r in %s%s:
%s
- On instance%s:
+ On %s%s:
%s
""".rstrip()
) % (
self.validator,
+ self._word_for_schema_in_error_message,
_utils.format_as_index(list(self.relative_schema_path)[:-1]),
_utils.indent(pschema),
+ self._word_for_instance_in_error_message,
_utils.format_as_index(self.relative_path),
_utils.indent(pinstance),
)
if PY3:
__str__ = __unicode__
+ else:
+ def __str__(self):
+ return unicode(self).encode("utf-8")
@classmethod
def create_from(cls, other):
@@ -86,7 +105,7 @@ class _Error(Exception):
return self.relative_path
path = deque(self.relative_path)
- path.extendleft(parent.absolute_path)
+ path.extendleft(reversed(parent.absolute_path))
return path
@property
@@ -96,7 +115,7 @@ class _Error(Exception):
return self.relative_schema_path
path = deque(self.relative_schema_path)
- path.extendleft(parent.absolute_schema_path)
+ path.extendleft(reversed(parent.absolute_schema_path))
return path
def _set(self, **kwargs):
@@ -113,26 +132,63 @@ class _Error(Exception):
class ValidationError(_Error):
- pass
+ """
+ An instance was invalid under a provided schema.
+ """
+
+ _word_for_schema_in_error_message = "schema"
+ _word_for_instance_in_error_message = "instance"
class SchemaError(_Error):
- pass
+ """
+ A schema was invalid under its corresponding metaschema.
+ """
+
+ _word_for_schema_in_error_message = "metaschema"
+ _word_for_instance_in_error_message = "schema"
+@attr.s(hash=True)
class RefResolutionError(Exception):
- pass
+ """
+ A ref could not be resolved.
+ """
+
+ _cause = attr.ib()
+
+ def __str__(self):
+ return str(self._cause)
+
+
+class UndefinedTypeCheck(Exception):
+ """
+ A type checker was asked to check a type it did not have registered.
+ """
+
+ def __init__(self, type):
+ self.type = type
+
+ def __unicode__(self):
+ return "Type %r is unknown to this type checker" % self.type
+
+ if PY3:
+ __str__ = __unicode__
+ else:
+ def __str__(self):
+ return unicode(self).encode("utf-8")
class UnknownType(Exception):
+ """
+ A validator was asked to validate an instance against an unknown type.
+ """
+
def __init__(self, type, instance, schema):
self.type = type
self.instance = instance
self.schema = schema
- def __str__(self):
- return unicode(self).encode("utf-8")
-
def __unicode__(self):
pschema = pprint.pformat(self.schema, width=72)
pinstance = pprint.pformat(self.instance, width=72)
@@ -147,29 +203,34 @@ class UnknownType(Exception):
if PY3:
__str__ = __unicode__
-
+ else:
+ def __str__(self):
+ return unicode(self).encode("utf-8")
class FormatError(Exception):
+ """
+ Validating a format failed.
+ """
+
def __init__(self, message, cause=None):
super(FormatError, self).__init__(message, cause)
self.message = message
self.cause = self.__cause__ = cause
- def __str__(self):
- return self.message.encode("utf-8")
-
def __unicode__(self):
return self.message
if PY3:
__str__ = __unicode__
+ else:
+ def __str__(self):
+ return self.message.encode("utf-8")
class ErrorTree(object):
"""
ErrorTrees make it easier to check which validations failed.
-
"""
_instance = _unset
@@ -184,12 +245,11 @@ class ErrorTree(object):
container = container[element]
container.errors[error.validator] = error
- self._instance = error.instance
+ container._instance = error.instance
def __contains__(self, index):
"""
Check whether ``instance[index]`` has any errors.
-
"""
return index in self._contents
@@ -201,8 +261,7 @@ class ErrorTree(object):
If the index is not in the instance that this tree corresponds to and
is not known by this tree, whatever error would be raised by
``instance.__getitem__`` will be propagated (usually this is some
- subclass of :class:`LookupError`.
-
+ subclass of `exceptions.LookupError`.
"""
if self._instance is not _unset and index not in self:
@@ -210,22 +269,22 @@ class ErrorTree(object):
return self._contents[index]
def __setitem__(self, index, value):
+ """
+ Add an error to the tree at the given ``index``.
+ """
self._contents[index] = value
def __iter__(self):
"""
Iterate (non-recursively) over the indices in the instance with errors.
-
"""
return iter(self._contents)
def __len__(self):
"""
- Same as :attr:`total_errors`.
-
+ Return the `total_errors`.
"""
-
return self.total_errors
def __repr__(self):
@@ -235,7 +294,6 @@ class ErrorTree(object):
def total_errors(self):
"""
The total number of errors in the entire tree, including children.
-
"""
child_errors = sum(len(tree) for _, tree in iteritems(self._contents))
@@ -243,6 +301,21 @@ class ErrorTree(object):
def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES):
+ """
+ Create a key function that can be used to sort errors by relevance.
+
+ Arguments:
+ weak (set):
+ a collection of validator names to consider to be "weak".
+ If there are two errors at the same level of the instance
+ and one is in the set of weak validator names, the other
+ error will take priority. By default, :validator:`anyOf` and
+ :validator:`oneOf` are considered weak validators and will
+ be superseded by other same-level validation errors.
+
+ strong (set):
+ a collection of validator names to consider to be "strong"
+ """
def relevance(error):
validator = error.validator
return -len(error.path), validator not in weak, validator in strong
@@ -253,6 +326,43 @@ relevance = by_relevance()
def best_match(errors, key=relevance):
+ """
+ Try to find an error that appears to be the best match among given errors.
+
+ In general, errors that are higher up in the instance (i.e. for which
+ `ValidationError.path` is shorter) are considered better matches,
+ since they indicate "more" is wrong with the instance.
+
+ If the resulting match is either :validator:`oneOf` or :validator:`anyOf`,
+ the *opposite* assumption is made -- i.e. the deepest error is picked,
+ since these validators only need to match once, and any other errors may
+ not be relevant.
+
+ Arguments:
+ errors (collections.Iterable):
+
+ the errors to select from. Do not provide a mixture of
+ errors from different validation attempts (i.e. from
+ different instances or schemas), since it won't produce
+ sensical output.
+
+ key (collections.Callable):
+
+ the key to use when sorting errors. See `relevance` and
+ transitively `by_relevance` for more details (the default is
+ to sort with the defaults of that function). Changing the
+ default is only useful if you want to change the function
+ that rates errors but still want the error context descent
+ done by this function.
+
+ Returns:
+ the best matching error, or ``None`` if the iterable was empty
+
+ .. note::
+
+ This function is a heuristic. Its return value may change for a given
+ set of inputs from version to version if better heuristics are added.
+ """
errors = iter(errors)
best = next(errors, None)
if best is None:
diff --git a/lib/spack/external/jsonschema/schemas/draft3.json b/lib/spack/external/jsonschema/schemas/draft3.json
index 5bcefe30d5..f8a09c563b 100644
--- a/lib/spack/external/jsonschema/schemas/draft3.json
+++ b/lib/spack/external/jsonschema/schemas/draft3.json
@@ -80,9 +80,7 @@
"type": "number"
},
"enum": {
- "minItems": 1,
- "type": "array",
- "uniqueItems": true
+ "type": "array"
},
"exclusiveMaximum": {
"default": false,
diff --git a/lib/spack/external/jsonschema/schemas/draft4.json b/lib/spack/external/jsonschema/schemas/draft4.json
index fead5cefab..9b666cff88 100644
--- a/lib/spack/external/jsonschema/schemas/draft4.json
+++ b/lib/spack/external/jsonschema/schemas/draft4.json
@@ -111,9 +111,7 @@
"type": "string"
},
"enum": {
- "minItems": 1,
- "type": "array",
- "uniqueItems": true
+ "type": "array"
},
"exclusiveMaximum": {
"default": false,
@@ -123,6 +121,9 @@
"default": false,
"type": "boolean"
},
+ "format": {
+ "type": "string"
+ },
"id": {
"format": "uri",
"type": "string"
diff --git a/lib/spack/external/jsonschema/schemas/draft6.json b/lib/spack/external/jsonschema/schemas/draft6.json
new file mode 100644
index 0000000000..a0d2bf7896
--- /dev/null
+++ b/lib/spack/external/jsonschema/schemas/draft6.json
@@ -0,0 +1,153 @@
+{
+ "$schema": "http://json-schema.org/draft-06/schema#",
+ "$id": "http://json-schema.org/draft-06/schema#",
+ "title": "Core schema meta-schema",
+ "definitions": {
+ "schemaArray": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$ref": "#" }
+ },
+ "nonNegativeInteger": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "nonNegativeIntegerDefault0": {
+ "allOf": [
+ { "$ref": "#/definitions/nonNegativeInteger" },
+ { "default": 0 }
+ ]
+ },
+ "simpleTypes": {
+ "enum": [
+ "array",
+ "boolean",
+ "integer",
+ "null",
+ "number",
+ "object",
+ "string"
+ ]
+ },
+ "stringArray": {
+ "type": "array",
+ "items": { "type": "string" },
+ "uniqueItems": true,
+ "default": []
+ }
+ },
+ "type": ["object", "boolean"],
+ "properties": {
+ "$id": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$schema": {
+ "type": "string",
+ "format": "uri"
+ },
+ "$ref": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "default": {},
+ "examples": {
+ "type": "array",
+ "items": {}
+ },
+ "multipleOf": {
+ "type": "number",
+ "exclusiveMinimum": 0
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "exclusiveMaximum": {
+ "type": "number"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "exclusiveMinimum": {
+ "type": "number"
+ },
+ "maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "pattern": {
+ "type": "string",
+ "format": "regex"
+ },
+ "additionalItems": { "$ref": "#" },
+ "items": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/schemaArray" }
+ ],
+ "default": {}
+ },
+ "maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "uniqueItems": {
+ "type": "boolean",
+ "default": false
+ },
+ "contains": { "$ref": "#" },
+ "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "required": { "$ref": "#/definitions/stringArray" },
+ "additionalProperties": { "$ref": "#" },
+ "definitions": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "properties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "patternProperties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "propertyNames": { "format": "regex" },
+ "default": {}
+ },
+ "dependencies": {
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/stringArray" }
+ ]
+ }
+ },
+ "propertyNames": { "$ref": "#" },
+ "const": {},
+ "enum": {
+ "type": "array"
+ },
+ "type": {
+ "anyOf": [
+ { "$ref": "#/definitions/simpleTypes" },
+ {
+ "type": "array",
+ "items": { "$ref": "#/definitions/simpleTypes" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ ]
+ },
+ "format": { "type": "string" },
+ "allOf": { "$ref": "#/definitions/schemaArray" },
+ "anyOf": { "$ref": "#/definitions/schemaArray" },
+ "oneOf": { "$ref": "#/definitions/schemaArray" },
+ "not": { "$ref": "#" }
+ },
+ "default": {}
+}
diff --git a/lib/spack/external/jsonschema/schemas/draft7.json b/lib/spack/external/jsonschema/schemas/draft7.json
new file mode 100644
index 0000000000..746cde9690
--- /dev/null
+++ b/lib/spack/external/jsonschema/schemas/draft7.json
@@ -0,0 +1,166 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "$id": "http://json-schema.org/draft-07/schema#",
+ "title": "Core schema meta-schema",
+ "definitions": {
+ "schemaArray": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$ref": "#" }
+ },
+ "nonNegativeInteger": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "nonNegativeIntegerDefault0": {
+ "allOf": [
+ { "$ref": "#/definitions/nonNegativeInteger" },
+ { "default": 0 }
+ ]
+ },
+ "simpleTypes": {
+ "enum": [
+ "array",
+ "boolean",
+ "integer",
+ "null",
+ "number",
+ "object",
+ "string"
+ ]
+ },
+ "stringArray": {
+ "type": "array",
+ "items": { "type": "string" },
+ "uniqueItems": true,
+ "default": []
+ }
+ },
+ "type": ["object", "boolean"],
+ "properties": {
+ "$id": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$schema": {
+ "type": "string",
+ "format": "uri"
+ },
+ "$ref": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$comment": {
+ "type": "string"
+ },
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "default": true,
+ "readOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "examples": {
+ "type": "array",
+ "items": true
+ },
+ "multipleOf": {
+ "type": "number",
+ "exclusiveMinimum": 0
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "exclusiveMaximum": {
+ "type": "number"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "exclusiveMinimum": {
+ "type": "number"
+ },
+ "maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "pattern": {
+ "type": "string",
+ "format": "regex"
+ },
+ "additionalItems": { "$ref": "#" },
+ "items": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/schemaArray" }
+ ],
+ "default": true
+ },
+ "maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "uniqueItems": {
+ "type": "boolean",
+ "default": false
+ },
+ "contains": { "$ref": "#" },
+ "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "required": { "$ref": "#/definitions/stringArray" },
+ "additionalProperties": { "$ref": "#" },
+ "definitions": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "properties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "patternProperties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "propertyNames": { "format": "regex" },
+ "default": {}
+ },
+ "dependencies": {
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/stringArray" }
+ ]
+ }
+ },
+ "propertyNames": { "$ref": "#" },
+ "const": true,
+ "enum": {
+ "type": "array",
+ "items": true
+ },
+ "type": {
+ "anyOf": [
+ { "$ref": "#/definitions/simpleTypes" },
+ {
+ "type": "array",
+ "items": { "$ref": "#/definitions/simpleTypes" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ ]
+ },
+ "format": { "type": "string" },
+ "contentMediaType": { "type": "string" },
+ "contentEncoding": { "type": "string" },
+ "if": {"$ref": "#"},
+ "then": {"$ref": "#"},
+ "else": {"$ref": "#"},
+ "allOf": { "$ref": "#/definitions/schemaArray" },
+ "anyOf": { "$ref": "#/definitions/schemaArray" },
+ "oneOf": { "$ref": "#/definitions/schemaArray" },
+ "not": { "$ref": "#" }
+ },
+ "default": true
+}
diff --git a/lib/spack/external/jsonschema/tests/__init__.py b/lib/spack/external/jsonschema/tests/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/spack/external/jsonschema/tests/__init__.py
+++ /dev/null
diff --git a/lib/spack/external/jsonschema/tests/compat.py b/lib/spack/external/jsonschema/tests/compat.py
deleted file mode 100644
index b37483f5dd..0000000000
--- a/lib/spack/external/jsonschema/tests/compat.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import sys
-
-
-if sys.version_info[:2] < (2, 7): # pragma: no cover
- import unittest2 as unittest
-else:
- import unittest
-
-try:
- from unittest import mock
-except ImportError:
- import mock
-
-
-# flake8: noqa
diff --git a/lib/spack/external/jsonschema/tests/test_cli.py b/lib/spack/external/jsonschema/tests/test_cli.py
deleted file mode 100644
index f625ca989d..0000000000
--- a/lib/spack/external/jsonschema/tests/test_cli.py
+++ /dev/null
@@ -1,110 +0,0 @@
-from jsonschema import Draft4Validator, ValidationError, cli
-from jsonschema.compat import StringIO
-from jsonschema.tests.compat import mock, unittest
-
-
-def fake_validator(*errors):
- errors = list(reversed(errors))
-
- class FakeValidator(object):
- def __init__(self, *args, **kwargs):
- pass
-
- def iter_errors(self, instance):
- if errors:
- return errors.pop()
- return []
- return FakeValidator
-
-
-class TestParser(unittest.TestCase):
- FakeValidator = fake_validator()
-
- def setUp(self):
- mock_open = mock.mock_open()
- patch_open = mock.patch.object(cli, "open", mock_open, create=True)
- patch_open.start()
- self.addCleanup(patch_open.stop)
-
- mock_json_load = mock.Mock()
- mock_json_load.return_value = {}
- patch_json_load = mock.patch("json.load")
- patch_json_load.start()
- self.addCleanup(patch_json_load.stop)
-
- def test_find_validator_by_fully_qualified_object_name(self):
- arguments = cli.parse_args(
- [
- "--validator",
- "jsonschema.tests.test_cli.TestParser.FakeValidator",
- "--instance", "foo.json",
- "schema.json",
- ]
- )
- self.assertIs(arguments["validator"], self.FakeValidator)
-
- def test_find_validator_in_jsonschema(self):
- arguments = cli.parse_args(
- [
- "--validator", "Draft4Validator",
- "--instance", "foo.json",
- "schema.json",
- ]
- )
- self.assertIs(arguments["validator"], Draft4Validator)
-
-
-class TestCLI(unittest.TestCase):
- def test_successful_validation(self):
- stdout, stderr = StringIO(), StringIO()
- exit_code = cli.run(
- {
- "validator": fake_validator(),
- "schema": {},
- "instances": [1],
- "error_format": "{error.message}",
- },
- stdout=stdout,
- stderr=stderr,
- )
- self.assertFalse(stdout.getvalue())
- self.assertFalse(stderr.getvalue())
- self.assertEqual(exit_code, 0)
-
- def test_unsuccessful_validation(self):
- error = ValidationError("I am an error!", instance=1)
- stdout, stderr = StringIO(), StringIO()
- exit_code = cli.run(
- {
- "validator": fake_validator([error]),
- "schema": {},
- "instances": [1],
- "error_format": "{error.instance} - {error.message}",
- },
- stdout=stdout,
- stderr=stderr,
- )
- self.assertFalse(stdout.getvalue())
- self.assertEqual(stderr.getvalue(), "1 - I am an error!")
- self.assertEqual(exit_code, 1)
-
- def test_unsuccessful_validation_multiple_instances(self):
- first_errors = [
- ValidationError("9", instance=1),
- ValidationError("8", instance=1),
- ]
- second_errors = [ValidationError("7", instance=2)]
- stdout, stderr = StringIO(), StringIO()
- exit_code = cli.run(
- {
- "validator": fake_validator(first_errors, second_errors),
- "schema": {},
- "instances": [1, 2],
- "error_format": "{error.instance} - {error.message}\t",
- },
- stdout=stdout,
- stderr=stderr,
- )
- self.assertFalse(stdout.getvalue())
- self.assertEqual(stderr.getvalue(), "1 - 9\t1 - 8\t2 - 7\t")
- self.assertEqual(exit_code, 1)
diff --git a/lib/spack/external/jsonschema/tests/test_exceptions.py b/lib/spack/external/jsonschema/tests/test_exceptions.py
deleted file mode 100644
index 9e5793c628..0000000000
--- a/lib/spack/external/jsonschema/tests/test_exceptions.py
+++ /dev/null
@@ -1,382 +0,0 @@
-import textwrap
-
-from jsonschema import Draft4Validator, exceptions
-from jsonschema.compat import PY3
-from jsonschema.tests.compat import mock, unittest
-
-
-class TestBestMatch(unittest.TestCase):
- def best_match(self, errors):
- errors = list(errors)
- best = exceptions.best_match(errors)
- reversed_best = exceptions.best_match(reversed(errors))
- self.assertEqual(
- best,
- reversed_best,
- msg="Didn't return a consistent best match!\n"
- "Got: {0}\n\nThen: {1}".format(best, reversed_best),
- )
- return best
-
- def test_shallower_errors_are_better_matches(self):
- validator = Draft4Validator(
- {
- "properties" : {
- "foo" : {
- "minProperties" : 2,
- "properties" : {"bar" : {"type" : "object"}},
- }
- }
- }
- )
- best = self.best_match(validator.iter_errors({"foo" : {"bar" : []}}))
- self.assertEqual(best.validator, "minProperties")
-
- def test_oneOf_and_anyOf_are_weak_matches(self):
- """
- A property you *must* match is probably better than one you have to
- match a part of.
-
- """
-
- validator = Draft4Validator(
- {
- "minProperties" : 2,
- "anyOf" : [{"type" : "string"}, {"type" : "number"}],
- "oneOf" : [{"type" : "string"}, {"type" : "number"}],
- }
- )
- best = self.best_match(validator.iter_errors({}))
- self.assertEqual(best.validator, "minProperties")
-
- def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self):
- """
- If the most relevant error is an anyOf, then we traverse its context
- and select the otherwise *least* relevant error, since in this case
- that means the most specific, deep, error inside the instance.
-
- I.e. since only one of the schemas must match, we look for the most
- relevant one.
-
- """
-
- validator = Draft4Validator(
- {
- "properties" : {
- "foo" : {
- "anyOf" : [
- {"type" : "string"},
- {"properties" : {"bar" : {"type" : "array"}}},
- ],
- },
- },
- },
- )
- best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
- self.assertEqual(best.validator_value, "array")
-
- def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self):
- """
- If the most relevant error is an oneOf, then we traverse its context
- and select the otherwise *least* relevant error, since in this case
- that means the most specific, deep, error inside the instance.
-
- I.e. since only one of the schemas must match, we look for the most
- relevant one.
-
- """
-
- validator = Draft4Validator(
- {
- "properties" : {
- "foo" : {
- "oneOf" : [
- {"type" : "string"},
- {"properties" : {"bar" : {"type" : "array"}}},
- ],
- },
- },
- },
- )
- best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
- self.assertEqual(best.validator_value, "array")
-
- def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self):
- """
- Now, if the error is allOf, we traverse but select the *most* relevant
- error from the context, because all schemas here must match anyways.
-
- """
-
- validator = Draft4Validator(
- {
- "properties" : {
- "foo" : {
- "allOf" : [
- {"type" : "string"},
- {"properties" : {"bar" : {"type" : "array"}}},
- ],
- },
- },
- },
- )
- best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
- self.assertEqual(best.validator_value, "string")
-
- def test_nested_context_for_oneOf(self):
- validator = Draft4Validator(
- {
- "properties" : {
- "foo" : {
- "oneOf" : [
- {"type" : "string"},
- {
- "oneOf" : [
- {"type" : "string"},
- {
- "properties" : {
- "bar" : {"type" : "array"}
- },
- },
- ],
- },
- ],
- },
- },
- },
- )
- best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
- self.assertEqual(best.validator_value, "array")
-
- def test_one_error(self):
- validator = Draft4Validator({"minProperties" : 2})
- error, = validator.iter_errors({})
- self.assertEqual(
- exceptions.best_match(validator.iter_errors({})).validator,
- "minProperties",
- )
-
- def test_no_errors(self):
- validator = Draft4Validator({})
- self.assertIsNone(exceptions.best_match(validator.iter_errors({})))
-
-
-class TestByRelevance(unittest.TestCase):
- def test_short_paths_are_better_matches(self):
- shallow = exceptions.ValidationError("Oh no!", path=["baz"])
- deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"])
- match = max([shallow, deep], key=exceptions.relevance)
- self.assertIs(match, shallow)
-
- match = max([deep, shallow], key=exceptions.relevance)
- self.assertIs(match, shallow)
-
- def test_global_errors_are_even_better_matches(self):
- shallow = exceptions.ValidationError("Oh no!", path=[])
- deep = exceptions.ValidationError("Oh yes!", path=["foo"])
-
- errors = sorted([shallow, deep], key=exceptions.relevance)
- self.assertEqual(
- [list(error.path) for error in errors],
- [["foo"], []],
- )
-
- errors = sorted([deep, shallow], key=exceptions.relevance)
- self.assertEqual(
- [list(error.path) for error in errors],
- [["foo"], []],
- )
-
- def test_weak_validators_are_lower_priority(self):
- weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
- normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
-
- best_match = exceptions.by_relevance(weak="a")
-
- match = max([weak, normal], key=best_match)
- self.assertIs(match, normal)
-
- match = max([normal, weak], key=best_match)
- self.assertIs(match, normal)
-
- def test_strong_validators_are_higher_priority(self):
- weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
- normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
- strong = exceptions.ValidationError("Oh fine!", path=[], validator="c")
-
- best_match = exceptions.by_relevance(weak="a", strong="c")
-
- match = max([weak, normal, strong], key=best_match)
- self.assertIs(match, strong)
-
- match = max([strong, normal, weak], key=best_match)
- self.assertIs(match, strong)
-
-
-class TestErrorTree(unittest.TestCase):
- def test_it_knows_how_many_total_errors_it_contains(self):
- errors = [mock.MagicMock() for _ in range(8)]
- tree = exceptions.ErrorTree(errors)
- self.assertEqual(tree.total_errors, 8)
-
- def test_it_contains_an_item_if_the_item_had_an_error(self):
- errors = [exceptions.ValidationError("a message", path=["bar"])]
- tree = exceptions.ErrorTree(errors)
- self.assertIn("bar", tree)
-
- def test_it_does_not_contain_an_item_if_the_item_had_no_error(self):
- errors = [exceptions.ValidationError("a message", path=["bar"])]
- tree = exceptions.ErrorTree(errors)
- self.assertNotIn("foo", tree)
-
- def test_validators_that_failed_appear_in_errors_dict(self):
- error = exceptions.ValidationError("a message", validator="foo")
- tree = exceptions.ErrorTree([error])
- self.assertEqual(tree.errors, {"foo" : error})
-
- def test_it_creates_a_child_tree_for_each_nested_path(self):
- errors = [
- exceptions.ValidationError("a bar message", path=["bar"]),
- exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]),
- ]
- tree = exceptions.ErrorTree(errors)
- self.assertIn(0, tree["bar"])
- self.assertNotIn(1, tree["bar"])
-
- def test_children_have_their_errors_dicts_built(self):
- e1, e2 = (
- exceptions.ValidationError("1", validator="foo", path=["bar", 0]),
- exceptions.ValidationError("2", validator="quux", path=["bar", 0]),
- )
- tree = exceptions.ErrorTree([e1, e2])
- self.assertEqual(tree["bar"][0].errors, {"foo" : e1, "quux" : e2})
-
- def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self):
- error = exceptions.ValidationError("123", validator="foo", instance=[])
- tree = exceptions.ErrorTree([error])
-
- with self.assertRaises(IndexError):
- tree[0]
-
- def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self):
- """
- If a validator is dumb (like :validator:`required` in draft 3) and
- refers to a path that isn't in the instance, the tree still properly
- returns a subtree for that path.
-
- """
-
- error = exceptions.ValidationError(
- "a message", validator="foo", instance={}, path=["foo"],
- )
- tree = exceptions.ErrorTree([error])
- self.assertIsInstance(tree["foo"], exceptions.ErrorTree)
-
-
-class TestErrorReprStr(unittest.TestCase):
- def make_error(self, **kwargs):
- defaults = dict(
- message=u"hello",
- validator=u"type",
- validator_value=u"string",
- instance=5,
- schema={u"type": u"string"},
- )
- defaults.update(kwargs)
- return exceptions.ValidationError(**defaults)
-
- def assertShows(self, expected, **kwargs):
- if PY3:
- expected = expected.replace("u'", "'")
- expected = textwrap.dedent(expected).rstrip("\n")
-
- error = self.make_error(**kwargs)
- message_line, _, rest = str(error).partition("\n")
- self.assertEqual(message_line, error.message)
- self.assertEqual(rest, expected)
-
- def test_repr(self):
- self.assertEqual(
- repr(exceptions.ValidationError(message="Hello!")),
- "<ValidationError: %r>" % "Hello!",
- )
-
- def test_unset_error(self):
- error = exceptions.ValidationError("message")
- self.assertEqual(str(error), "message")
-
- kwargs = {
- "validator": "type",
- "validator_value": "string",
- "instance": 5,
- "schema": {"type": "string"}
- }
- # Just the message should show if any of the attributes are unset
- for attr in kwargs:
- k = dict(kwargs)
- del k[attr]
- error = exceptions.ValidationError("message", **k)
- self.assertEqual(str(error), "message")
-
- def test_empty_paths(self):
- self.assertShows(
- """
- Failed validating u'type' in schema:
- {u'type': u'string'}
-
- On instance:
- 5
- """,
- path=[],
- schema_path=[],
- )
-
- def test_one_item_paths(self):
- self.assertShows(
- """
- Failed validating u'type' in schema:
- {u'type': u'string'}
-
- On instance[0]:
- 5
- """,
- path=[0],
- schema_path=["items"],
- )
-
- def test_multiple_item_paths(self):
- self.assertShows(
- """
- Failed validating u'type' in schema[u'items'][0]:
- {u'type': u'string'}
-
- On instance[0][u'a']:
- 5
- """,
- path=[0, u"a"],
- schema_path=[u"items", 0, 1],
- )
-
- def test_uses_pprint(self):
- with mock.patch("pprint.pformat") as pformat:
- str(self.make_error())
- self.assertEqual(pformat.call_count, 2) # schema + instance
-
- def test_str_works_with_instances_having_overriden_eq_operator(self):
- """
- Check for https://github.com/Julian/jsonschema/issues/164 which
- rendered exceptions unusable when a `ValidationError` involved
- instances with an `__eq__` method that returned truthy values.
-
- """
-
- instance = mock.MagicMock()
- error = exceptions.ValidationError(
- "a message",
- validator="foo",
- instance=instance,
- validator_value="some",
- schema="schema",
- )
- str(error)
- self.assertFalse(instance.__eq__.called)
diff --git a/lib/spack/external/jsonschema/tests/test_format.py b/lib/spack/external/jsonschema/tests/test_format.py
deleted file mode 100644
index 8392ca1de3..0000000000
--- a/lib/spack/external/jsonschema/tests/test_format.py
+++ /dev/null
@@ -1,63 +0,0 @@
-"""
-Tests for the parts of jsonschema related to the :validator:`format` property.
-
-"""
-
-from jsonschema.tests.compat import mock, unittest
-
-from jsonschema import FormatError, ValidationError, FormatChecker
-from jsonschema.validators import Draft4Validator
-
-
-class TestFormatChecker(unittest.TestCase):
- def setUp(self):
- self.fn = mock.Mock()
-
- def test_it_can_validate_no_formats(self):
- checker = FormatChecker(formats=())
- self.assertFalse(checker.checkers)
-
- def test_it_raises_a_key_error_for_unknown_formats(self):
- with self.assertRaises(KeyError):
- FormatChecker(formats=["o noes"])
-
- def test_it_can_register_cls_checkers(self):
- with mock.patch.dict(FormatChecker.checkers, clear=True):
- FormatChecker.cls_checks("new")(self.fn)
- self.assertEqual(FormatChecker.checkers, {"new" : (self.fn, ())})
-
- def test_it_can_register_checkers(self):
- checker = FormatChecker()
- checker.checks("new")(self.fn)
- self.assertEqual(
- checker.checkers,
- dict(FormatChecker.checkers, new=(self.fn, ()))
- )
-
- def test_it_catches_registered_errors(self):
- checker = FormatChecker()
- cause = self.fn.side_effect = ValueError()
-
- checker.checks("foo", raises=ValueError)(self.fn)
-
- with self.assertRaises(FormatError) as cm:
- checker.check("bar", "foo")
-
- self.assertIs(cm.exception.cause, cause)
- self.assertIs(cm.exception.__cause__, cause)
-
- # Unregistered errors should not be caught
- self.fn.side_effect = AttributeError
- with self.assertRaises(AttributeError):
- checker.check("bar", "foo")
-
- def test_format_error_causes_become_validation_error_causes(self):
- checker = FormatChecker()
- checker.checks("foo", raises=ValueError)(self.fn)
- cause = self.fn.side_effect = ValueError()
- validator = Draft4Validator({"format" : "foo"}, format_checker=checker)
-
- with self.assertRaises(ValidationError) as cm:
- validator.validate("bar")
-
- self.assertIs(cm.exception.__cause__, cause)
diff --git a/lib/spack/external/jsonschema/tests/test_jsonschema_test_suite.py b/lib/spack/external/jsonschema/tests/test_jsonschema_test_suite.py
deleted file mode 100644
index 75c6857bc0..0000000000
--- a/lib/spack/external/jsonschema/tests/test_jsonschema_test_suite.py
+++ /dev/null
@@ -1,290 +0,0 @@
-"""
-Test runner for the JSON Schema official test suite
-
-Tests comprehensive correctness of each draft's validator.
-
-See https://github.com/json-schema/JSON-Schema-Test-Suite for details.
-
-"""
-
-from contextlib import closing
-from decimal import Decimal
-import glob
-import json
-import io
-import itertools
-import os
-import re
-import subprocess
-import sys
-
-try:
- from sys import pypy_version_info
-except ImportError:
- pypy_version_info = None
-
-from jsonschema import (
- FormatError, SchemaError, ValidationError, Draft3Validator,
- Draft4Validator, FormatChecker, draft3_format_checker,
- draft4_format_checker, validate,
-)
-from jsonschema.compat import PY3
-from jsonschema.tests.compat import mock, unittest
-import jsonschema
-
-
-REPO_ROOT = os.path.join(os.path.dirname(jsonschema.__file__), os.path.pardir)
-SUITE = os.getenv("JSON_SCHEMA_TEST_SUITE", os.path.join(REPO_ROOT, "json"))
-
-if not os.path.isdir(SUITE):
- raise ValueError(
- "Can't find the JSON-Schema-Test-Suite directory. Set the "
- "'JSON_SCHEMA_TEST_SUITE' environment variable or run the tests from "
- "alongside a checkout of the suite."
- )
-
-TESTS_DIR = os.path.join(SUITE, "tests")
-JSONSCHEMA_SUITE = os.path.join(SUITE, "bin", "jsonschema_suite")
-
-remotes_stdout = subprocess.Popen(
- ["python", JSONSCHEMA_SUITE, "remotes"], stdout=subprocess.PIPE,
-).stdout
-
-with closing(remotes_stdout):
- if PY3:
- remotes_stdout = io.TextIOWrapper(remotes_stdout)
- REMOTES = json.load(remotes_stdout)
-
-
-def make_case(schema, data, valid, name):
- if valid:
- def test_case(self):
- kwargs = getattr(self, "validator_kwargs", {})
- validate(data, schema, cls=self.validator_class, **kwargs)
- else:
- def test_case(self):
- kwargs = getattr(self, "validator_kwargs", {})
- with self.assertRaises(ValidationError):
- validate(data, schema, cls=self.validator_class, **kwargs)
-
- if not PY3:
- name = name.encode("utf-8")
- test_case.__name__ = name
-
- return test_case
-
-
-def maybe_skip(skip, test_case, case, test):
- if skip is not None:
- reason = skip(case, test)
- if reason is not None:
- test_case = unittest.skip(reason)(test_case)
- return test_case
-
-
-def load_json_cases(tests_glob, ignore_glob="", basedir=TESTS_DIR, skip=None):
- if ignore_glob:
- ignore_glob = os.path.join(basedir, ignore_glob)
-
- def add_test_methods(test_class):
- ignored = set(glob.iglob(ignore_glob))
-
- for filename in glob.iglob(os.path.join(basedir, tests_glob)):
- if filename in ignored:
- continue
-
- validating, _ = os.path.splitext(os.path.basename(filename))
- id = itertools.count(1)
-
- with open(filename) as test_file:
- for case in json.load(test_file):
- for test in case["tests"]:
- name = "test_%s_%s_%s" % (
- validating,
- next(id),
- re.sub(r"[\W ]+", "_", test["description"]),
- )
- assert not hasattr(test_class, name), name
-
- test_case = make_case(
- data=test["data"],
- schema=case["schema"],
- valid=test["valid"],
- name=name,
- )
- test_case = maybe_skip(skip, test_case, case, test)
- setattr(test_class, name, test_case)
-
- return test_class
- return add_test_methods
-
-
-class TypesMixin(object):
- @unittest.skipIf(PY3, "In Python 3 json.load always produces unicode")
- def test_string_a_bytestring_is_a_string(self):
- self.validator_class({"type" : "string"}).validate(b"foo")
-
-
-class DecimalMixin(object):
- def test_it_can_validate_with_decimals(self):
- schema = {"type" : "number"}
- validator = self.validator_class(
- schema, types={"number" : (int, float, Decimal)}
- )
-
- for valid in [1, 1.1, Decimal(1) / Decimal(8)]:
- validator.validate(valid)
-
- for invalid in ["foo", {}, [], True, None]:
- with self.assertRaises(ValidationError):
- validator.validate(invalid)
-
-
-def missing_format(checker):
- def missing_format(case, test):
- format = case["schema"].get("format")
- if format not in checker.checkers:
- return "Format checker {0!r} not found.".format(format)
- elif (
- format == "date-time" and
- pypy_version_info is not None and
- pypy_version_info[:2] <= (1, 9)
- ):
- # datetime.datetime is overzealous about typechecking in <=1.9
- return "datetime.datetime is broken on this version of PyPy."
- return missing_format
-
-
-class FormatMixin(object):
- def test_it_returns_true_for_formats_it_does_not_know_about(self):
- validator = self.validator_class(
- {"format" : "carrot"}, format_checker=FormatChecker(),
- )
- validator.validate("bugs")
-
- def test_it_does_not_validate_formats_by_default(self):
- validator = self.validator_class({})
- self.assertIsNone(validator.format_checker)
-
- def test_it_validates_formats_if_a_checker_is_provided(self):
- checker = mock.Mock(spec=FormatChecker)
- validator = self.validator_class(
- {"format" : "foo"}, format_checker=checker,
- )
-
- validator.validate("bar")
-
- checker.check.assert_called_once_with("bar", "foo")
-
- cause = ValueError()
- checker.check.side_effect = FormatError('aoeu', cause=cause)
-
- with self.assertRaises(ValidationError) as cm:
- validator.validate("bar")
- # Make sure original cause is attached
- self.assertIs(cm.exception.cause, cause)
-
- def test_it_validates_formats_of_any_type(self):
- checker = mock.Mock(spec=FormatChecker)
- validator = self.validator_class(
- {"format" : "foo"}, format_checker=checker,
- )
-
- validator.validate([1, 2, 3])
-
- checker.check.assert_called_once_with([1, 2, 3], "foo")
-
- cause = ValueError()
- checker.check.side_effect = FormatError('aoeu', cause=cause)
-
- with self.assertRaises(ValidationError) as cm:
- validator.validate([1, 2, 3])
- # Make sure original cause is attached
- self.assertIs(cm.exception.cause, cause)
-
-
-if sys.maxunicode == 2 ** 16 - 1: # This is a narrow build.
- def narrow_unicode_build(case, test):
- if "supplementary Unicode" in test["description"]:
- return "Not running surrogate Unicode case, this Python is narrow."
-else:
- def narrow_unicode_build(case, test): # This isn't, skip nothing.
- return
-
-
-@load_json_cases(
- "draft3/*.json",
- skip=narrow_unicode_build,
- ignore_glob="draft3/refRemote.json",
-)
-@load_json_cases(
- "draft3/optional/format.json", skip=missing_format(draft3_format_checker)
-)
-@load_json_cases("draft3/optional/bignum.json")
-@load_json_cases("draft3/optional/zeroTerminatedFloats.json")
-class TestDraft3(unittest.TestCase, TypesMixin, DecimalMixin, FormatMixin):
- validator_class = Draft3Validator
- validator_kwargs = {"format_checker" : draft3_format_checker}
-
- def test_any_type_is_valid_for_type_any(self):
- validator = self.validator_class({"type" : "any"})
- validator.validate(mock.Mock())
-
- # TODO: we're in need of more meta schema tests
- def test_invalid_properties(self):
- with self.assertRaises(SchemaError):
- validate({}, {"properties": {"test": True}},
- cls=self.validator_class)
-
- def test_minItems_invalid_string(self):
- with self.assertRaises(SchemaError):
- # needs to be an integer
- validate([1], {"minItems" : "1"}, cls=self.validator_class)
-
-
-@load_json_cases(
- "draft4/*.json",
- skip=narrow_unicode_build,
- ignore_glob="draft4/refRemote.json",
-)
-@load_json_cases(
- "draft4/optional/format.json", skip=missing_format(draft4_format_checker)
-)
-@load_json_cases("draft4/optional/bignum.json")
-@load_json_cases("draft4/optional/zeroTerminatedFloats.json")
-class TestDraft4(unittest.TestCase, TypesMixin, DecimalMixin, FormatMixin):
- validator_class = Draft4Validator
- validator_kwargs = {"format_checker" : draft4_format_checker}
-
- # TODO: we're in need of more meta schema tests
- def test_invalid_properties(self):
- with self.assertRaises(SchemaError):
- validate({}, {"properties": {"test": True}},
- cls=self.validator_class)
-
- def test_minItems_invalid_string(self):
- with self.assertRaises(SchemaError):
- # needs to be an integer
- validate([1], {"minItems" : "1"}, cls=self.validator_class)
-
-
-class RemoteRefResolutionMixin(object):
- def setUp(self):
- patch = mock.patch("jsonschema.validators.requests")
- requests = patch.start()
- requests.get.side_effect = self.resolve
- self.addCleanup(patch.stop)
-
- def resolve(self, reference):
- _, _, reference = reference.partition("http://localhost:1234/")
- return mock.Mock(**{"json.return_value" : REMOTES.get(reference)})
-
-
-@load_json_cases("draft3/refRemote.json")
-class Draft3RemoteResolution(RemoteRefResolutionMixin, unittest.TestCase):
- validator_class = Draft3Validator
-
-
-@load_json_cases("draft4/refRemote.json")
-class Draft4RemoteResolution(RemoteRefResolutionMixin, unittest.TestCase):
- validator_class = Draft4Validator
diff --git a/lib/spack/external/jsonschema/tests/test_validators.py b/lib/spack/external/jsonschema/tests/test_validators.py
deleted file mode 100644
index f8692388ea..0000000000
--- a/lib/spack/external/jsonschema/tests/test_validators.py
+++ /dev/null
@@ -1,786 +0,0 @@
-from collections import deque
-from contextlib import contextmanager
-import json
-
-from jsonschema import FormatChecker, ValidationError
-from jsonschema.tests.compat import mock, unittest
-from jsonschema.validators import (
- RefResolutionError, UnknownType, Draft3Validator,
- Draft4Validator, RefResolver, create, extend, validator_for, validate,
-)
-
-
-class TestCreateAndExtend(unittest.TestCase):
- def setUp(self):
- self.meta_schema = {u"properties" : {u"smelly" : {}}}
- self.smelly = mock.MagicMock()
- self.validators = {u"smelly" : self.smelly}
- self.types = {u"dict" : dict}
- self.Validator = create(
- meta_schema=self.meta_schema,
- validators=self.validators,
- default_types=self.types,
- )
-
- self.validator_value = 12
- self.schema = {u"smelly" : self.validator_value}
- self.validator = self.Validator(self.schema)
-
- def test_attrs(self):
- self.assertEqual(self.Validator.VALIDATORS, self.validators)
- self.assertEqual(self.Validator.META_SCHEMA, self.meta_schema)
- self.assertEqual(self.Validator.DEFAULT_TYPES, self.types)
-
- def test_init(self):
- self.assertEqual(self.validator.schema, self.schema)
-
- def test_iter_errors(self):
- instance = "hello"
-
- self.smelly.return_value = []
- self.assertEqual(list(self.validator.iter_errors(instance)), [])
-
- error = mock.Mock()
- self.smelly.return_value = [error]
- self.assertEqual(list(self.validator.iter_errors(instance)), [error])
-
- self.smelly.assert_called_with(
- self.validator, self.validator_value, instance, self.schema,
- )
-
- def test_if_a_version_is_provided_it_is_registered(self):
- with mock.patch("jsonschema.validators.validates") as validates:
- validates.side_effect = lambda version : lambda cls : cls
- Validator = create(meta_schema={u"id" : ""}, version="my version")
- validates.assert_called_once_with("my version")
- self.assertEqual(Validator.__name__, "MyVersionValidator")
-
- def test_if_a_version_is_not_provided_it_is_not_registered(self):
- with mock.patch("jsonschema.validators.validates") as validates:
- create(meta_schema={u"id" : "id"})
- self.assertFalse(validates.called)
-
- def test_extend(self):
- validators = dict(self.Validator.VALIDATORS)
- new = mock.Mock()
-
- Extended = extend(self.Validator, validators={u"a new one" : new})
-
- validators.update([(u"a new one", new)])
- self.assertEqual(Extended.VALIDATORS, validators)
- self.assertNotIn(u"a new one", self.Validator.VALIDATORS)
-
- self.assertEqual(Extended.META_SCHEMA, self.Validator.META_SCHEMA)
- self.assertEqual(Extended.DEFAULT_TYPES, self.Validator.DEFAULT_TYPES)
-
-
-class TestIterErrors(unittest.TestCase):
- def setUp(self):
- self.validator = Draft3Validator({})
-
- def test_iter_errors(self):
- instance = [1, 2]
- schema = {
- u"disallow" : u"array",
- u"enum" : [["a", "b", "c"], ["d", "e", "f"]],
- u"minItems" : 3
- }
-
- got = (e.message for e in self.validator.iter_errors(instance, schema))
- expected = [
- "%r is disallowed for [1, 2]" % (schema["disallow"],),
- "[1, 2] is too short",
- "[1, 2] is not one of %r" % (schema["enum"],),
- ]
- self.assertEqual(sorted(got), sorted(expected))
-
- def test_iter_errors_multiple_failures_one_validator(self):
- instance = {"foo" : 2, "bar" : [1], "baz" : 15, "quux" : "spam"}
- schema = {
- u"properties" : {
- "foo" : {u"type" : "string"},
- "bar" : {u"minItems" : 2},
- "baz" : {u"maximum" : 10, u"enum" : [2, 4, 6, 8]},
- }
- }
-
- errors = list(self.validator.iter_errors(instance, schema))
- self.assertEqual(len(errors), 4)
-
-
-class TestValidationErrorMessages(unittest.TestCase):
- def message_for(self, instance, schema, *args, **kwargs):
- kwargs.setdefault("cls", Draft3Validator)
- with self.assertRaises(ValidationError) as e:
- validate(instance, schema, *args, **kwargs)
- return e.exception.message
-
- def test_single_type_failure(self):
- message = self.message_for(instance=1, schema={u"type" : u"string"})
- self.assertEqual(message, "1 is not of type %r" % u"string")
-
- def test_single_type_list_failure(self):
- message = self.message_for(instance=1, schema={u"type" : [u"string"]})
- self.assertEqual(message, "1 is not of type %r" % u"string")
-
- def test_multiple_type_failure(self):
- types = u"string", u"object"
- message = self.message_for(instance=1, schema={u"type" : list(types)})
- self.assertEqual(message, "1 is not of type %r, %r" % types)
-
- def test_object_without_title_type_failure(self):
- type = {u"type" : [{u"minimum" : 3}]}
- message = self.message_for(instance=1, schema={u"type" : [type]})
- self.assertEqual(message, "1 is not of type %r" % (type,))
-
- def test_object_with_name_type_failure(self):
- name = "Foo"
- schema = {u"type" : [{u"name" : name, u"minimum" : 3}]}
- message = self.message_for(instance=1, schema=schema)
- self.assertEqual(message, "1 is not of type %r" % (name,))
-
- def test_minimum(self):
- message = self.message_for(instance=1, schema={"minimum" : 2})
- self.assertEqual(message, "1 is less than the minimum of 2")
-
- def test_maximum(self):
- message = self.message_for(instance=1, schema={"maximum" : 0})
- self.assertEqual(message, "1 is greater than the maximum of 0")
-
- def test_dependencies_failure_has_single_element_not_list(self):
- depend, on = "bar", "foo"
- schema = {u"dependencies" : {depend : on}}
- message = self.message_for({"bar" : 2}, schema)
- self.assertEqual(message, "%r is a dependency of %r" % (on, depend))
-
- def test_additionalItems_single_failure(self):
- message = self.message_for(
- [2], {u"items" : [], u"additionalItems" : False},
- )
- self.assertIn("(2 was unexpected)", message)
-
- def test_additionalItems_multiple_failures(self):
- message = self.message_for(
- [1, 2, 3], {u"items" : [], u"additionalItems" : False}
- )
- self.assertIn("(1, 2, 3 were unexpected)", message)
-
- def test_additionalProperties_single_failure(self):
- additional = "foo"
- schema = {u"additionalProperties" : False}
- message = self.message_for({additional : 2}, schema)
- self.assertIn("(%r was unexpected)" % (additional,), message)
-
- def test_additionalProperties_multiple_failures(self):
- schema = {u"additionalProperties" : False}
- message = self.message_for(dict.fromkeys(["foo", "bar"]), schema)
-
- self.assertIn(repr("foo"), message)
- self.assertIn(repr("bar"), message)
- self.assertIn("were unexpected)", message)
-
- def test_invalid_format_default_message(self):
- checker = FormatChecker(formats=())
- check_fn = mock.Mock(return_value=False)
- checker.checks(u"thing")(check_fn)
-
- schema = {u"format" : u"thing"}
- message = self.message_for("bla", schema, format_checker=checker)
-
- self.assertIn(repr("bla"), message)
- self.assertIn(repr("thing"), message)
- self.assertIn("is not a", message)
-
-
-class TestValidationErrorDetails(unittest.TestCase):
- # TODO: These really need unit tests for each individual validator, rather
- # than just these higher level tests.
- def test_anyOf(self):
- instance = 5
- schema = {
- "anyOf": [
- {"minimum": 20},
- {"type": "string"}
- ]
- }
-
- validator = Draft4Validator(schema)
- errors = list(validator.iter_errors(instance))
- self.assertEqual(len(errors), 1)
- e = errors[0]
-
- self.assertEqual(e.validator, "anyOf")
- self.assertEqual(e.validator_value, schema["anyOf"])
- self.assertEqual(e.instance, instance)
- self.assertEqual(e.schema, schema)
- self.assertIsNone(e.parent)
-
- self.assertEqual(e.path, deque([]))
- self.assertEqual(e.relative_path, deque([]))
- self.assertEqual(e.absolute_path, deque([]))
-
- self.assertEqual(e.schema_path, deque(["anyOf"]))
- self.assertEqual(e.relative_schema_path, deque(["anyOf"]))
- self.assertEqual(e.absolute_schema_path, deque(["anyOf"]))
-
- self.assertEqual(len(e.context), 2)
-
- e1, e2 = sorted_errors(e.context)
-
- self.assertEqual(e1.validator, "minimum")
- self.assertEqual(e1.validator_value, schema["anyOf"][0]["minimum"])
- self.assertEqual(e1.instance, instance)
- self.assertEqual(e1.schema, schema["anyOf"][0])
- self.assertIs(e1.parent, e)
-
- self.assertEqual(e1.path, deque([]))
- self.assertEqual(e1.absolute_path, deque([]))
- self.assertEqual(e1.relative_path, deque([]))
-
- self.assertEqual(e1.schema_path, deque([0, "minimum"]))
- self.assertEqual(e1.relative_schema_path, deque([0, "minimum"]))
- self.assertEqual(
- e1.absolute_schema_path, deque(["anyOf", 0, "minimum"]),
- )
-
- self.assertFalse(e1.context)
-
- self.assertEqual(e2.validator, "type")
- self.assertEqual(e2.validator_value, schema["anyOf"][1]["type"])
- self.assertEqual(e2.instance, instance)
- self.assertEqual(e2.schema, schema["anyOf"][1])
- self.assertIs(e2.parent, e)
-
- self.assertEqual(e2.path, deque([]))
- self.assertEqual(e2.relative_path, deque([]))
- self.assertEqual(e2.absolute_path, deque([]))
-
- self.assertEqual(e2.schema_path, deque([1, "type"]))
- self.assertEqual(e2.relative_schema_path, deque([1, "type"]))
- self.assertEqual(e2.absolute_schema_path, deque(["anyOf", 1, "type"]))
-
- self.assertEqual(len(e2.context), 0)
-
- def test_type(self):
- instance = {"foo": 1}
- schema = {
- "type": [
- {"type": "integer"},
- {
- "type": "object",
- "properties": {
- "foo": {"enum": [2]}
- }
- }
- ]
- }
-
- validator = Draft3Validator(schema)
- errors = list(validator.iter_errors(instance))
- self.assertEqual(len(errors), 1)
- e = errors[0]
-
- self.assertEqual(e.validator, "type")
- self.assertEqual(e.validator_value, schema["type"])
- self.assertEqual(e.instance, instance)
- self.assertEqual(e.schema, schema)
- self.assertIsNone(e.parent)
-
- self.assertEqual(e.path, deque([]))
- self.assertEqual(e.relative_path, deque([]))
- self.assertEqual(e.absolute_path, deque([]))
-
- self.assertEqual(e.schema_path, deque(["type"]))
- self.assertEqual(e.relative_schema_path, deque(["type"]))
- self.assertEqual(e.absolute_schema_path, deque(["type"]))
-
- self.assertEqual(len(e.context), 2)
-
- e1, e2 = sorted_errors(e.context)
-
- self.assertEqual(e1.validator, "type")
- self.assertEqual(e1.validator_value, schema["type"][0]["type"])
- self.assertEqual(e1.instance, instance)
- self.assertEqual(e1.schema, schema["type"][0])
- self.assertIs(e1.parent, e)
-
- self.assertEqual(e1.path, deque([]))
- self.assertEqual(e1.relative_path, deque([]))
- self.assertEqual(e1.absolute_path, deque([]))
-
- self.assertEqual(e1.schema_path, deque([0, "type"]))
- self.assertEqual(e1.relative_schema_path, deque([0, "type"]))
- self.assertEqual(e1.absolute_schema_path, deque(["type", 0, "type"]))
-
- self.assertFalse(e1.context)
-
- self.assertEqual(e2.validator, "enum")
- self.assertEqual(e2.validator_value, [2])
- self.assertEqual(e2.instance, 1)
- self.assertEqual(e2.schema, {u"enum" : [2]})
- self.assertIs(e2.parent, e)
-
- self.assertEqual(e2.path, deque(["foo"]))
- self.assertEqual(e2.relative_path, deque(["foo"]))
- self.assertEqual(e2.absolute_path, deque(["foo"]))
-
- self.assertEqual(
- e2.schema_path, deque([1, "properties", "foo", "enum"]),
- )
- self.assertEqual(
- e2.relative_schema_path, deque([1, "properties", "foo", "enum"]),
- )
- self.assertEqual(
- e2.absolute_schema_path,
- deque(["type", 1, "properties", "foo", "enum"]),
- )
-
- self.assertFalse(e2.context)
-
- def test_single_nesting(self):
- instance = {"foo" : 2, "bar" : [1], "baz" : 15, "quux" : "spam"}
- schema = {
- "properties" : {
- "foo" : {"type" : "string"},
- "bar" : {"minItems" : 2},
- "baz" : {"maximum" : 10, "enum" : [2, 4, 6, 8]},
- }
- }
-
- validator = Draft3Validator(schema)
- errors = validator.iter_errors(instance)
- e1, e2, e3, e4 = sorted_errors(errors)
-
- self.assertEqual(e1.path, deque(["bar"]))
- self.assertEqual(e2.path, deque(["baz"]))
- self.assertEqual(e3.path, deque(["baz"]))
- self.assertEqual(e4.path, deque(["foo"]))
-
- self.assertEqual(e1.relative_path, deque(["bar"]))
- self.assertEqual(e2.relative_path, deque(["baz"]))
- self.assertEqual(e3.relative_path, deque(["baz"]))
- self.assertEqual(e4.relative_path, deque(["foo"]))
-
- self.assertEqual(e1.absolute_path, deque(["bar"]))
- self.assertEqual(e2.absolute_path, deque(["baz"]))
- self.assertEqual(e3.absolute_path, deque(["baz"]))
- self.assertEqual(e4.absolute_path, deque(["foo"]))
-
- self.assertEqual(e1.validator, "minItems")
- self.assertEqual(e2.validator, "enum")
- self.assertEqual(e3.validator, "maximum")
- self.assertEqual(e4.validator, "type")
-
- def test_multiple_nesting(self):
- instance = [1, {"foo" : 2, "bar" : {"baz" : [1]}}, "quux"]
- schema = {
- "type" : "string",
- "items" : {
- "type" : ["string", "object"],
- "properties" : {
- "foo" : {"enum" : [1, 3]},
- "bar" : {
- "type" : "array",
- "properties" : {
- "bar" : {"required" : True},
- "baz" : {"minItems" : 2},
- }
- }
- }
- }
- }
-
- validator = Draft3Validator(schema)
- errors = validator.iter_errors(instance)
- e1, e2, e3, e4, e5, e6 = sorted_errors(errors)
-
- self.assertEqual(e1.path, deque([]))
- self.assertEqual(e2.path, deque([0]))
- self.assertEqual(e3.path, deque([1, "bar"]))
- self.assertEqual(e4.path, deque([1, "bar", "bar"]))
- self.assertEqual(e5.path, deque([1, "bar", "baz"]))
- self.assertEqual(e6.path, deque([1, "foo"]))
-
- self.assertEqual(e1.schema_path, deque(["type"]))
- self.assertEqual(e2.schema_path, deque(["items", "type"]))
- self.assertEqual(
- list(e3.schema_path), ["items", "properties", "bar", "type"],
- )
- self.assertEqual(
- list(e4.schema_path),
- ["items", "properties", "bar", "properties", "bar", "required"],
- )
- self.assertEqual(
- list(e5.schema_path),
- ["items", "properties", "bar", "properties", "baz", "minItems"]
- )
- self.assertEqual(
- list(e6.schema_path), ["items", "properties", "foo", "enum"],
- )
-
- self.assertEqual(e1.validator, "type")
- self.assertEqual(e2.validator, "type")
- self.assertEqual(e3.validator, "type")
- self.assertEqual(e4.validator, "required")
- self.assertEqual(e5.validator, "minItems")
- self.assertEqual(e6.validator, "enum")
-
- def test_additionalProperties(self):
- instance = {"bar": "bar", "foo": 2}
- schema = {
- "additionalProperties" : {"type": "integer", "minimum": 5}
- }
-
- validator = Draft3Validator(schema)
- errors = validator.iter_errors(instance)
- e1, e2 = sorted_errors(errors)
-
- self.assertEqual(e1.path, deque(["bar"]))
- self.assertEqual(e2.path, deque(["foo"]))
-
- self.assertEqual(e1.validator, "type")
- self.assertEqual(e2.validator, "minimum")
-
- def test_patternProperties(self):
- instance = {"bar": 1, "foo": 2}
- schema = {
- "patternProperties" : {
- "bar": {"type": "string"},
- "foo": {"minimum": 5}
- }
- }
-
- validator = Draft3Validator(schema)
- errors = validator.iter_errors(instance)
- e1, e2 = sorted_errors(errors)
-
- self.assertEqual(e1.path, deque(["bar"]))
- self.assertEqual(e2.path, deque(["foo"]))
-
- self.assertEqual(e1.validator, "type")
- self.assertEqual(e2.validator, "minimum")
-
- def test_additionalItems(self):
- instance = ["foo", 1]
- schema = {
- "items": [],
- "additionalItems" : {"type": "integer", "minimum": 5}
- }
-
- validator = Draft3Validator(schema)
- errors = validator.iter_errors(instance)
- e1, e2 = sorted_errors(errors)
-
- self.assertEqual(e1.path, deque([0]))
- self.assertEqual(e2.path, deque([1]))
-
- self.assertEqual(e1.validator, "type")
- self.assertEqual(e2.validator, "minimum")
-
- def test_additionalItems_with_items(self):
- instance = ["foo", "bar", 1]
- schema = {
- "items": [{}],
- "additionalItems" : {"type": "integer", "minimum": 5}
- }
-
- validator = Draft3Validator(schema)
- errors = validator.iter_errors(instance)
- e1, e2 = sorted_errors(errors)
-
- self.assertEqual(e1.path, deque([1]))
- self.assertEqual(e2.path, deque([2]))
-
- self.assertEqual(e1.validator, "type")
- self.assertEqual(e2.validator, "minimum")
-
-
-class ValidatorTestMixin(object):
- def setUp(self):
- self.instance = mock.Mock()
- self.schema = {}
- self.resolver = mock.Mock()
- self.validator = self.validator_class(self.schema)
-
- def test_valid_instances_are_valid(self):
- errors = iter([])
-
- with mock.patch.object(
- self.validator, "iter_errors", return_value=errors,
- ):
- self.assertTrue(
- self.validator.is_valid(self.instance, self.schema)
- )
-
- def test_invalid_instances_are_not_valid(self):
- errors = iter([mock.Mock()])
-
- with mock.patch.object(
- self.validator, "iter_errors", return_value=errors,
- ):
- self.assertFalse(
- self.validator.is_valid(self.instance, self.schema)
- )
-
- def test_non_existent_properties_are_ignored(self):
- instance, my_property, my_value = mock.Mock(), mock.Mock(), mock.Mock()
- validate(instance=instance, schema={my_property : my_value})
-
- def test_it_creates_a_ref_resolver_if_not_provided(self):
- self.assertIsInstance(self.validator.resolver, RefResolver)
-
- def test_it_delegates_to_a_ref_resolver(self):
- resolver = RefResolver("", {})
- schema = {"$ref" : mock.Mock()}
-
- @contextmanager
- def resolving():
- yield {"type": "integer"}
-
- with mock.patch.object(resolver, "resolving") as resolve:
- resolve.return_value = resolving()
- with self.assertRaises(ValidationError):
- self.validator_class(schema, resolver=resolver).validate(None)
-
- resolve.assert_called_once_with(schema["$ref"])
-
- def test_is_type_is_true_for_valid_type(self):
- self.assertTrue(self.validator.is_type("foo", "string"))
-
- def test_is_type_is_false_for_invalid_type(self):
- self.assertFalse(self.validator.is_type("foo", "array"))
-
- def test_is_type_evades_bool_inheriting_from_int(self):
- self.assertFalse(self.validator.is_type(True, "integer"))
- self.assertFalse(self.validator.is_type(True, "number"))
-
- def test_is_type_raises_exception_for_unknown_type(self):
- with self.assertRaises(UnknownType):
- self.validator.is_type("foo", object())
-
-
-class TestDraft3Validator(ValidatorTestMixin, unittest.TestCase):
- validator_class = Draft3Validator
-
- def test_is_type_is_true_for_any_type(self):
- self.assertTrue(self.validator.is_valid(mock.Mock(), {"type": "any"}))
-
- def test_is_type_does_not_evade_bool_if_it_is_being_tested(self):
- self.assertTrue(self.validator.is_type(True, "boolean"))
- self.assertTrue(self.validator.is_valid(True, {"type": "any"}))
-
- def test_non_string_custom_types(self):
- schema = {'type': [None]}
- cls = self.validator_class(schema, types={None: type(None)})
- cls.validate(None, schema)
-
-
-class TestDraft4Validator(ValidatorTestMixin, unittest.TestCase):
- validator_class = Draft4Validator
-
-
-class TestBuiltinFormats(unittest.TestCase):
- """
- The built-in (specification-defined) formats do not raise type errors.
-
- If an instance or value is not a string, it should be ignored.
-
- """
-
-
-for format in FormatChecker.checkers:
- def test(self, format=format):
- v = Draft4Validator({"format": format}, format_checker=FormatChecker())
- v.validate(123)
-
- name = "test_{0}_ignores_non_strings".format(format)
- test.__name__ = name
- setattr(TestBuiltinFormats, name, test)
- del test # Ugh py.test. Stop discovering top level tests.
-
-
-class TestValidatorFor(unittest.TestCase):
- def test_draft_3(self):
- schema = {"$schema" : "http://json-schema.org/draft-03/schema"}
- self.assertIs(validator_for(schema), Draft3Validator)
-
- schema = {"$schema" : "http://json-schema.org/draft-03/schema#"}
- self.assertIs(validator_for(schema), Draft3Validator)
-
- def test_draft_4(self):
- schema = {"$schema" : "http://json-schema.org/draft-04/schema"}
- self.assertIs(validator_for(schema), Draft4Validator)
-
- schema = {"$schema" : "http://json-schema.org/draft-04/schema#"}
- self.assertIs(validator_for(schema), Draft4Validator)
-
- def test_custom_validator(self):
- Validator = create(meta_schema={"id" : "meta schema id"}, version="12")
- schema = {"$schema" : "meta schema id"}
- self.assertIs(validator_for(schema), Validator)
-
- def test_validator_for_jsonschema_default(self):
- self.assertIs(validator_for({}), Draft4Validator)
-
- def test_validator_for_custom_default(self):
- self.assertIs(validator_for({}, default=None), None)
-
-
-class TestValidate(unittest.TestCase):
- def test_draft3_validator_is_chosen(self):
- schema = {"$schema" : "http://json-schema.org/draft-03/schema#"}
- with mock.patch.object(Draft3Validator, "check_schema") as chk_schema:
- validate({}, schema)
- chk_schema.assert_called_once_with(schema)
- # Make sure it works without the empty fragment
- schema = {"$schema" : "http://json-schema.org/draft-03/schema"}
- with mock.patch.object(Draft3Validator, "check_schema") as chk_schema:
- validate({}, schema)
- chk_schema.assert_called_once_with(schema)
-
- def test_draft4_validator_is_chosen(self):
- schema = {"$schema" : "http://json-schema.org/draft-04/schema#"}
- with mock.patch.object(Draft4Validator, "check_schema") as chk_schema:
- validate({}, schema)
- chk_schema.assert_called_once_with(schema)
-
- def test_draft4_validator_is_the_default(self):
- with mock.patch.object(Draft4Validator, "check_schema") as chk_schema:
- validate({}, {})
- chk_schema.assert_called_once_with({})
-
-
-class TestRefResolver(unittest.TestCase):
-
- base_uri = ""
- stored_uri = "foo://stored"
- stored_schema = {"stored" : "schema"}
-
- def setUp(self):
- self.referrer = {}
- self.store = {self.stored_uri : self.stored_schema}
- self.resolver = RefResolver(self.base_uri, self.referrer, self.store)
-
- def test_it_does_not_retrieve_schema_urls_from_the_network(self):
- ref = Draft3Validator.META_SCHEMA["id"]
- with mock.patch.object(self.resolver, "resolve_remote") as remote:
- with self.resolver.resolving(ref) as resolved:
- self.assertEqual(resolved, Draft3Validator.META_SCHEMA)
- self.assertFalse(remote.called)
-
- def test_it_resolves_local_refs(self):
- ref = "#/properties/foo"
- self.referrer["properties"] = {"foo" : object()}
- with self.resolver.resolving(ref) as resolved:
- self.assertEqual(resolved, self.referrer["properties"]["foo"])
-
- def test_it_resolves_local_refs_with_id(self):
- schema = {"id": "foo://bar/schema#", "a": {"foo": "bar"}}
- resolver = RefResolver.from_schema(schema)
- with resolver.resolving("#/a") as resolved:
- self.assertEqual(resolved, schema["a"])
- with resolver.resolving("foo://bar/schema#/a") as resolved:
- self.assertEqual(resolved, schema["a"])
-
- def test_it_retrieves_stored_refs(self):
- with self.resolver.resolving(self.stored_uri) as resolved:
- self.assertIs(resolved, self.stored_schema)
-
- self.resolver.store["cached_ref"] = {"foo" : 12}
- with self.resolver.resolving("cached_ref#/foo") as resolved:
- self.assertEqual(resolved, 12)
-
- def test_it_retrieves_unstored_refs_via_requests(self):
- ref = "http://bar#baz"
- schema = {"baz" : 12}
-
- with mock.patch("jsonschema.validators.requests") as requests:
- requests.get.return_value.json.return_value = schema
- with self.resolver.resolving(ref) as resolved:
- self.assertEqual(resolved, 12)
- requests.get.assert_called_once_with("http://bar")
-
- def test_it_retrieves_unstored_refs_via_urlopen(self):
- ref = "http://bar#baz"
- schema = {"baz" : 12}
-
- with mock.patch("jsonschema.validators.requests", None):
- with mock.patch("jsonschema.validators.urlopen") as urlopen:
- urlopen.return_value.read.return_value = (
- json.dumps(schema).encode("utf8"))
- with self.resolver.resolving(ref) as resolved:
- self.assertEqual(resolved, 12)
- urlopen.assert_called_once_with("http://bar")
-
- def test_it_can_construct_a_base_uri_from_a_schema(self):
- schema = {"id" : "foo"}
- resolver = RefResolver.from_schema(schema)
- self.assertEqual(resolver.base_uri, "foo")
- with resolver.resolving("") as resolved:
- self.assertEqual(resolved, schema)
- with resolver.resolving("#") as resolved:
- self.assertEqual(resolved, schema)
- with resolver.resolving("foo") as resolved:
- self.assertEqual(resolved, schema)
- with resolver.resolving("foo#") as resolved:
- self.assertEqual(resolved, schema)
-
- def test_it_can_construct_a_base_uri_from_a_schema_without_id(self):
- schema = {}
- resolver = RefResolver.from_schema(schema)
- self.assertEqual(resolver.base_uri, "")
- with resolver.resolving("") as resolved:
- self.assertEqual(resolved, schema)
- with resolver.resolving("#") as resolved:
- self.assertEqual(resolved, schema)
-
- def test_custom_uri_scheme_handlers(self):
- schema = {"foo": "bar"}
- ref = "foo://bar"
- foo_handler = mock.Mock(return_value=schema)
- resolver = RefResolver("", {}, handlers={"foo": foo_handler})
- with resolver.resolving(ref) as resolved:
- self.assertEqual(resolved, schema)
- foo_handler.assert_called_once_with(ref)
-
- def test_cache_remote_on(self):
- ref = "foo://bar"
- foo_handler = mock.Mock()
- resolver = RefResolver(
- "", {}, cache_remote=True, handlers={"foo" : foo_handler},
- )
- with resolver.resolving(ref):
- pass
- with resolver.resolving(ref):
- pass
- foo_handler.assert_called_once_with(ref)
-
- def test_cache_remote_off(self):
- ref = "foo://bar"
- foo_handler = mock.Mock()
- resolver = RefResolver(
- "", {}, cache_remote=False, handlers={"foo" : foo_handler},
- )
- with resolver.resolving(ref):
- pass
- with resolver.resolving(ref):
- pass
- self.assertEqual(foo_handler.call_count, 2)
-
- def test_if_you_give_it_junk_you_get_a_resolution_error(self):
- ref = "foo://bar"
- foo_handler = mock.Mock(side_effect=ValueError("Oh no! What's this?"))
- resolver = RefResolver("", {}, handlers={"foo" : foo_handler})
- with self.assertRaises(RefResolutionError) as err:
- with resolver.resolving(ref):
- pass
- self.assertEqual(str(err.exception), "Oh no! What's this?")
-
-
-def sorted_errors(errors):
- def key(error):
- return (
- [str(e) for e in error.path],
- [str(e) for e in error.schema_path]
- )
- return sorted(errors, key=key)
diff --git a/lib/spack/external/jsonschema/validators.py b/lib/spack/external/jsonschema/validators.py
index 30c3515398..1dc420c70d 100644
--- a/lib/spack/external/jsonschema/validators.py
+++ b/lib/spack/external/jsonschema/validators.py
@@ -1,26 +1,107 @@
+"""
+Creation and extension of validators, with implementations for existing drafts.
+"""
from __future__ import division
+from warnings import warn
import contextlib
import json
import numbers
-requests = None
+from six import add_metaclass
-from jsonschema import _utils, _validators
+from jsonschema import (
+ _legacy_validators,
+ _types,
+ _utils,
+ _validators,
+ exceptions,
+)
from jsonschema.compat import (
- Sequence, urljoin, urlsplit, urldefrag, unquote, urlopen,
- str_types, int_types, iteritems,
+ Sequence,
+ int_types,
+ iteritems,
+ lru_cache,
+ str_types,
+ unquote,
+ urldefrag,
+ urljoin,
+ urlopen,
+ urlsplit,
)
-from jsonschema.exceptions import ErrorTree # Backwards compatibility # noqa
-from jsonschema.exceptions import RefResolutionError, SchemaError, UnknownType
+# Sigh. https://gitlab.com/pycqa/flake8/issues/280
+# https://github.com/pyga/ebb-lint/issues/7
+# Imported for backwards compatibility.
+from jsonschema.exceptions import ErrorTree
+ErrorTree
+
+
+class _DontDoThat(Exception):
+ """
+ Raised when a Validators with non-default type checker is misused.
+
+ Asking one for DEFAULT_TYPES doesn't make sense, since type checkers
+ exist for the unrepresentable cases where DEFAULT_TYPES can't
+ represent the type relationship.
+ """
+
+ def __str__(self):
+ return "DEFAULT_TYPES cannot be used on Validators using TypeCheckers"
-_unset = _utils.Unset()
validators = {}
meta_schemas = _utils.URIDict()
+def _generate_legacy_type_checks(types=()):
+ """
+ Generate newer-style type checks out of JSON-type-name-to-type mappings.
+
+ Arguments:
+
+ types (dict):
+
+ A mapping of type names to their Python types
+
+ Returns:
+
+ A dictionary of definitions to pass to `TypeChecker`
+ """
+ types = dict(types)
+
+ def gen_type_check(pytypes):
+ pytypes = _utils.flatten(pytypes)
+
+ def type_check(checker, instance):
+ if isinstance(instance, bool):
+ if bool not in pytypes:
+ return False
+ return isinstance(instance, pytypes)
+
+ return type_check
+
+ definitions = {}
+ for typename, pytypes in iteritems(types):
+ definitions[typename] = gen_type_check(pytypes)
+
+ return definitions
+
+
+_DEPRECATED_DEFAULT_TYPES = {
+ u"array": list,
+ u"boolean": bool,
+ u"integer": int_types,
+ u"null": type(None),
+ u"number": numbers.Number,
+ u"object": dict,
+ u"string": str_types,
+}
+_TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES = _types.TypeChecker(
+ type_checkers=_generate_legacy_type_checks(_DEPRECATED_DEFAULT_TYPES),
+)
+
+
def validates(version):
"""
Register the decorated validator for a ``version`` of the specification.
@@ -28,40 +109,180 @@ def validates(version):
Registered validators and their meta schemas will be considered when
parsing ``$schema`` properties' URIs.
- :argument str version: an identifier to use as the version's name
- :returns: a class decorator to decorate the validator with the version
+ Arguments:
+
+ version (str):
+
+ An identifier to use as the version's name
+
+ Returns:
+
+ collections.Callable:
+ a class decorator to decorate the validator with the version
"""
def _validates(cls):
validators[version] = cls
- if u"id" in cls.META_SCHEMA:
- meta_schemas[cls.META_SCHEMA[u"id"]] = cls
+ meta_schema_id = cls.ID_OF(cls.META_SCHEMA)
+ if meta_schema_id:
+ meta_schemas[meta_schema_id] = cls
return cls
return _validates
-def create(meta_schema, validators=(), version=None, default_types=None): # noqa
- if default_types is None:
- default_types = {
- u"array" : list, u"boolean" : bool, u"integer" : int_types,
- u"null" : type(None), u"number" : numbers.Number, u"object" : dict,
- u"string" : str_types,
- }
+def _DEFAULT_TYPES(self):
+ if self._CREATED_WITH_DEFAULT_TYPES is None:
+ raise _DontDoThat()
+
+ warn(
+ (
+ "The DEFAULT_TYPES attribute is deprecated. "
+ "See the type checker attached to this validator instead."
+ ),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self._DEFAULT_TYPES
+
+
+class _DefaultTypesDeprecatingMetaClass(type):
+ DEFAULT_TYPES = property(_DEFAULT_TYPES)
+
+
+def _id_of(schema):
+ if schema is True or schema is False:
+ return u""
+ return schema.get(u"$id", u"")
+
+
+def create(
+ meta_schema,
+ validators=(),
+ version=None,
+ default_types=None,
+ type_checker=None,
+ id_of=_id_of,
+):
+ """
+ Create a new validator class.
+
+ Arguments:
+
+ meta_schema (collections.Mapping):
+
+ the meta schema for the new validator class
+
+ validators (collections.Mapping):
+
+ a mapping from names to callables, where each callable will
+ validate the schema property with the given name.
+
+ Each callable should take 4 arguments:
+
+ 1. a validator instance,
+ 2. the value of the property being validated within the
+ instance
+ 3. the instance
+ 4. the schema
+
+ version (str):
+
+ an identifier for the version that this validator class will
+ validate. If provided, the returned validator class will
+ have its ``__name__`` set to include the version, and also
+ will have `jsonschema.validators.validates` automatically
+ called for the given version.
+ type_checker (jsonschema.TypeChecker):
+
+ a type checker, used when applying the :validator:`type` validator.
+
+ If unprovided, a `jsonschema.TypeChecker` will be created
+ with a set of default types typical of JSON Schema drafts.
+
+ default_types (collections.Mapping):
+
+ .. deprecated:: 3.0.0
+
+ Please use the type_checker argument instead.
+
+ If set, it provides mappings of JSON types to Python types
+ that will be converted to functions and redefined in this
+ object's `jsonschema.TypeChecker`.
+
+ id_of (collections.Callable):
+
+ A function that given a schema, returns its ID.
+
+ Returns:
+
+ a new `jsonschema.IValidator` class
+ """
+
+ if default_types is not None:
+ if type_checker is not None:
+ raise TypeError(
+ "Do not specify default_types when providing a type checker.",
+ )
+ _created_with_default_types = True
+ warn(
+ (
+ "The default_types argument is deprecated. "
+ "Use the type_checker argument instead."
+ ),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ type_checker = _types.TypeChecker(
+ type_checkers=_generate_legacy_type_checks(default_types),
+ )
+ else:
+ default_types = _DEPRECATED_DEFAULT_TYPES
+ if type_checker is None:
+ _created_with_default_types = False
+ type_checker = _TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES
+ elif type_checker is _TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES:
+ _created_with_default_types = False
+ else:
+ _created_with_default_types = None
+
+ @add_metaclass(_DefaultTypesDeprecatingMetaClass)
class Validator(object):
+
VALIDATORS = dict(validators)
META_SCHEMA = dict(meta_schema)
- DEFAULT_TYPES = dict(default_types)
+ TYPE_CHECKER = type_checker
+ ID_OF = staticmethod(id_of)
+
+ DEFAULT_TYPES = property(_DEFAULT_TYPES)
+ _DEFAULT_TYPES = dict(default_types)
+ _CREATED_WITH_DEFAULT_TYPES = _created_with_default_types
def __init__(
- self, schema, types=(), resolver=None, format_checker=None,
+ self,
+ schema,
+ types=(),
+ resolver=None,
+ format_checker=None,
):
- self._types = dict(self.DEFAULT_TYPES)
- self._types.update(types)
+ if types:
+ warn(
+ (
+ "The types argument is deprecated. Provide "
+ "a type_checker to jsonschema.validators.extend "
+ "instead."
+ ),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ self.TYPE_CHECKER = self.TYPE_CHECKER.redefine_many(
+ _generate_legacy_type_checks(types),
+ )
if resolver is None:
- resolver = RefResolver.from_schema(schema)
+ resolver = RefResolver.from_schema(schema, id_of=id_of)
self.resolver = resolver
self.format_checker = format_checker
@@ -70,13 +291,28 @@ def create(meta_schema, validators=(), version=None, default_types=None): # noq
@classmethod
def check_schema(cls, schema):
for error in cls(cls.META_SCHEMA).iter_errors(schema):
- raise SchemaError.create_from(error)
+ raise exceptions.SchemaError.create_from(error)
def iter_errors(self, instance, _schema=None):
if _schema is None:
_schema = self.schema
- with self.resolver.in_scope(_schema.get(u"id", u"")):
+ if _schema is True:
+ return
+ elif _schema is False:
+ yield exceptions.ValidationError(
+ "False schema does not allow %r" % (instance,),
+ validator=None,
+ validator_value=None,
+ instance=instance,
+ schema=_schema,
+ )
+ return
+
+ scope = id_of(_schema)
+ if scope:
+ self.resolver.push_scope(scope)
+ try:
ref = _schema.get(u"$ref")
if ref is not None:
validators = [(u"$ref", ref)]
@@ -100,6 +336,9 @@ def create(meta_schema, validators=(), version=None, default_types=None): # noq
if k != u"$ref":
error.schema_path.appendleft(k)
yield error
+ finally:
+ if scope:
+ self.resolver.pop_scope()
def descend(self, instance, schema, path=None, schema_path=None):
for error in self.iter_errors(instance, schema):
@@ -114,19 +353,10 @@ def create(meta_schema, validators=(), version=None, default_types=None): # noq
raise error
def is_type(self, instance, type):
- if type not in self._types:
- raise UnknownType(type, instance, self.schema)
- pytypes = self._types[type]
-
- # bool inherits from int, so ensure bools aren't reported as ints
- if isinstance(instance, bool):
- pytypes = _utils.flatten(pytypes)
- is_number = any(
- issubclass(pytype, numbers.Number) for pytype in pytypes
- )
- if is_number and bool not in pytypes:
- return False
- return isinstance(instance, pytypes)
+ try:
+ return self.TYPE_CHECKER.is_type(instance, type)
+ except exceptions.UndefinedTypeCheck:
+ raise exceptions.UnknownType(type, instance, self.schema)
def is_valid(self, instance, _schema=None):
error = next(self.iter_errors(instance, _schema), None)
@@ -139,104 +369,290 @@ def create(meta_schema, validators=(), version=None, default_types=None): # noq
return Validator
-def extend(validator, validators, version=None):
+def extend(validator, validators=(), version=None, type_checker=None):
+ """
+ Create a new validator class by extending an existing one.
+
+ Arguments:
+
+ validator (jsonschema.IValidator):
+
+ an existing validator class
+
+ validators (collections.Mapping):
+
+ a mapping of new validator callables to extend with, whose
+ structure is as in `create`.
+
+ .. note::
+
+ Any validator callables with the same name as an
+ existing one will (silently) replace the old validator
+ callable entirely, effectively overriding any validation
+ done in the "parent" validator class.
+
+ If you wish to instead extend the behavior of a parent's
+ validator callable, delegate and call it directly in
+ the new validator function by retrieving it using
+ ``OldValidator.VALIDATORS["validator_name"]``.
+
+ version (str):
+
+ a version for the new validator class
+
+ type_checker (jsonschema.TypeChecker):
+
+ a type checker, used when applying the :validator:`type` validator.
+
+ If unprovided, the type checker of the extended
+ `jsonschema.IValidator` will be carried along.`
+
+ Returns:
+
+ a new `jsonschema.IValidator` class extending the one provided
+
+ .. note:: Meta Schemas
+
+ The new validator class will have its parent's meta schema.
+
+ If you wish to change or extend the meta schema in the new
+ validator class, modify ``META_SCHEMA`` directly on the returned
+ class. Note that no implicit copying is done, so a copy should
+ likely be made before modifying it, in order to not affect the
+ old validator.
+ """
+
all_validators = dict(validator.VALIDATORS)
all_validators.update(validators)
+
+ if type_checker is None:
+ type_checker = validator.TYPE_CHECKER
+ elif validator._CREATED_WITH_DEFAULT_TYPES:
+ raise TypeError(
+ "Cannot extend a validator created with default_types "
+ "with a type_checker. Update the validator to use a "
+ "type_checker when created."
+ )
return create(
meta_schema=validator.META_SCHEMA,
validators=all_validators,
version=version,
- default_types=validator.DEFAULT_TYPES,
+ type_checker=type_checker,
+ id_of=validator.ID_OF,
)
Draft3Validator = create(
meta_schema=_utils.load_schema("draft3"),
validators={
- u"$ref" : _validators.ref,
- u"additionalItems" : _validators.additionalItems,
- u"additionalProperties" : _validators.additionalProperties,
- u"dependencies" : _validators.dependencies,
- u"disallow" : _validators.disallow_draft3,
- u"divisibleBy" : _validators.multipleOf,
- u"enum" : _validators.enum,
- u"extends" : _validators.extends_draft3,
- u"format" : _validators.format,
- u"items" : _validators.items,
- u"maxItems" : _validators.maxItems,
- u"maxLength" : _validators.maxLength,
- u"maximum" : _validators.maximum,
- u"minItems" : _validators.minItems,
- u"minLength" : _validators.minLength,
- u"minimum" : _validators.minimum,
- u"multipleOf" : _validators.multipleOf,
- u"pattern" : _validators.pattern,
- u"patternProperties" : _validators.patternProperties,
- u"properties" : _validators.properties_draft3,
- u"type" : _validators.type_draft3,
- u"uniqueItems" : _validators.uniqueItems,
+ u"$ref": _validators.ref,
+ u"additionalItems": _validators.additionalItems,
+ u"additionalProperties": _validators.additionalProperties,
+ u"dependencies": _legacy_validators.dependencies_draft3,
+ u"disallow": _legacy_validators.disallow_draft3,
+ u"divisibleBy": _validators.multipleOf,
+ u"enum": _validators.enum,
+ u"extends": _legacy_validators.extends_draft3,
+ u"format": _validators.format,
+ u"items": _legacy_validators.items_draft3_draft4,
+ u"maxItems": _validators.maxItems,
+ u"maxLength": _validators.maxLength,
+ u"maximum": _legacy_validators.maximum_draft3_draft4,
+ u"minItems": _validators.minItems,
+ u"minLength": _validators.minLength,
+ u"minimum": _legacy_validators.minimum_draft3_draft4,
+ u"pattern": _validators.pattern,
+ u"patternProperties": _validators.patternProperties,
+ u"properties": _legacy_validators.properties_draft3,
+ u"type": _legacy_validators.type_draft3,
+ u"uniqueItems": _validators.uniqueItems,
},
+ type_checker=_types.draft3_type_checker,
version="draft3",
+ id_of=lambda schema: schema.get(u"id", ""),
)
Draft4Validator = create(
meta_schema=_utils.load_schema("draft4"),
validators={
- u"$ref" : _validators.ref,
- u"additionalItems" : _validators.additionalItems,
- u"additionalProperties" : _validators.additionalProperties,
- u"allOf" : _validators.allOf_draft4,
- u"anyOf" : _validators.anyOf_draft4,
- u"dependencies" : _validators.dependencies,
- u"enum" : _validators.enum,
- u"format" : _validators.format,
- u"items" : _validators.items,
- u"maxItems" : _validators.maxItems,
- u"maxLength" : _validators.maxLength,
- u"maxProperties" : _validators.maxProperties_draft4,
- u"maximum" : _validators.maximum,
- u"minItems" : _validators.minItems,
- u"minLength" : _validators.minLength,
- u"minProperties" : _validators.minProperties_draft4,
- u"minimum" : _validators.minimum,
- u"multipleOf" : _validators.multipleOf,
- u"not" : _validators.not_draft4,
- u"oneOf" : _validators.oneOf_draft4,
- u"pattern" : _validators.pattern,
- u"patternProperties" : _validators.patternProperties,
- u"properties" : _validators.properties_draft4,
- u"required" : _validators.required_draft4,
- u"type" : _validators.type_draft4,
- u"uniqueItems" : _validators.uniqueItems,
+ u"$ref": _validators.ref,
+ u"additionalItems": _validators.additionalItems,
+ u"additionalProperties": _validators.additionalProperties,
+ u"allOf": _validators.allOf,
+ u"anyOf": _validators.anyOf,
+ u"dependencies": _validators.dependencies,
+ u"enum": _validators.enum,
+ u"format": _validators.format,
+ u"items": _legacy_validators.items_draft3_draft4,
+ u"maxItems": _validators.maxItems,
+ u"maxLength": _validators.maxLength,
+ u"maxProperties": _validators.maxProperties,
+ u"maximum": _legacy_validators.maximum_draft3_draft4,
+ u"minItems": _validators.minItems,
+ u"minLength": _validators.minLength,
+ u"minProperties": _validators.minProperties,
+ u"minimum": _legacy_validators.minimum_draft3_draft4,
+ u"multipleOf": _validators.multipleOf,
+ u"not": _validators.not_,
+ u"oneOf": _validators.oneOf,
+ u"pattern": _validators.pattern,
+ u"patternProperties": _validators.patternProperties,
+ u"properties": _validators.properties,
+ u"required": _validators.required,
+ u"type": _validators.type,
+ u"uniqueItems": _validators.uniqueItems,
},
+ type_checker=_types.draft4_type_checker,
version="draft4",
+ id_of=lambda schema: schema.get(u"id", ""),
+)
+
+Draft6Validator = create(
+ meta_schema=_utils.load_schema("draft6"),
+ validators={
+ u"$ref": _validators.ref,
+ u"additionalItems": _validators.additionalItems,
+ u"additionalProperties": _validators.additionalProperties,
+ u"allOf": _validators.allOf,
+ u"anyOf": _validators.anyOf,
+ u"const": _validators.const,
+ u"contains": _validators.contains,
+ u"dependencies": _validators.dependencies,
+ u"enum": _validators.enum,
+ u"exclusiveMaximum": _validators.exclusiveMaximum,
+ u"exclusiveMinimum": _validators.exclusiveMinimum,
+ u"format": _validators.format,
+ u"items": _validators.items,
+ u"maxItems": _validators.maxItems,
+ u"maxLength": _validators.maxLength,
+ u"maxProperties": _validators.maxProperties,
+ u"maximum": _validators.maximum,
+ u"minItems": _validators.minItems,
+ u"minLength": _validators.minLength,
+ u"minProperties": _validators.minProperties,
+ u"minimum": _validators.minimum,
+ u"multipleOf": _validators.multipleOf,
+ u"not": _validators.not_,
+ u"oneOf": _validators.oneOf,
+ u"pattern": _validators.pattern,
+ u"patternProperties": _validators.patternProperties,
+ u"properties": _validators.properties,
+ u"propertyNames": _validators.propertyNames,
+ u"required": _validators.required,
+ u"type": _validators.type,
+ u"uniqueItems": _validators.uniqueItems,
+ },
+ type_checker=_types.draft6_type_checker,
+ version="draft6",
+)
+
+Draft7Validator = create(
+ meta_schema=_utils.load_schema("draft7"),
+ validators={
+ u"$ref": _validators.ref,
+ u"additionalItems": _validators.additionalItems,
+ u"additionalProperties": _validators.additionalProperties,
+ u"allOf": _validators.allOf,
+ u"anyOf": _validators.anyOf,
+ u"const": _validators.const,
+ u"contains": _validators.contains,
+ u"dependencies": _validators.dependencies,
+ u"enum": _validators.enum,
+ u"exclusiveMaximum": _validators.exclusiveMaximum,
+ u"exclusiveMinimum": _validators.exclusiveMinimum,
+ u"format": _validators.format,
+ u"if": _validators.if_,
+ u"items": _validators.items,
+ u"maxItems": _validators.maxItems,
+ u"maxLength": _validators.maxLength,
+ u"maxProperties": _validators.maxProperties,
+ u"maximum": _validators.maximum,
+ u"minItems": _validators.minItems,
+ u"minLength": _validators.minLength,
+ u"minProperties": _validators.minProperties,
+ u"minimum": _validators.minimum,
+ u"multipleOf": _validators.multipleOf,
+ u"oneOf": _validators.oneOf,
+ u"not": _validators.not_,
+ u"pattern": _validators.pattern,
+ u"patternProperties": _validators.patternProperties,
+ u"properties": _validators.properties,
+ u"propertyNames": _validators.propertyNames,
+ u"required": _validators.required,
+ u"type": _validators.type,
+ u"uniqueItems": _validators.uniqueItems,
+ },
+ type_checker=_types.draft7_type_checker,
+ version="draft7",
)
+_LATEST_VERSION = Draft7Validator
+
class RefResolver(object):
"""
Resolve JSON References.
- :argument str base_uri: URI of the referring document
- :argument referrer: the actual referring document
- :argument dict store: a mapping from URIs to documents to cache
- :argument bool cache_remote: whether remote refs should be cached after
- first resolution
- :argument dict handlers: a mapping from URI schemes to functions that
- should be used to retrieve them
+ Arguments:
+
+ base_uri (str):
+
+ The URI of the referring document
+
+ referrer:
+
+ The actual referring document
+
+ store (dict):
+
+ A mapping from URIs to documents to cache
+
+ cache_remote (bool):
+
+ Whether remote refs should be cached after first resolution
+
+ handlers (dict):
+
+ A mapping from URI schemes to functions that should be used
+ to retrieve them
+
+ urljoin_cache (:func:`functools.lru_cache`):
+ A cache that will be used for caching the results of joining
+ the resolution scope to subscopes.
+
+ remote_cache (:func:`functools.lru_cache`):
+
+ A cache that will be used for caching the results of
+ resolved remote URLs.
+
+ Attributes:
+
+ cache_remote (bool):
+
+ Whether remote refs should be cached after first resolution
"""
def __init__(
- self, base_uri, referrer, store=(), cache_remote=True, handlers=(),
+ self,
+ base_uri,
+ referrer,
+ store=(),
+ cache_remote=True,
+ handlers=(),
+ urljoin_cache=None,
+ remote_cache=None,
):
- self.base_uri = base_uri
- self.resolution_scope = base_uri
- # This attribute is not used, it is for backwards compatibility
+ if urljoin_cache is None:
+ urljoin_cache = lru_cache(1024)(urljoin)
+ if remote_cache is None:
+ remote_cache = lru_cache(1024)(self.resolve_from_url)
+
self.referrer = referrer
self.cache_remote = cache_remote
self.handlers = dict(handlers)
+ self._scopes_stack = [base_uri]
self.store = _utils.URIDict(
(id, validator.META_SCHEMA)
for id, validator in iteritems(meta_schemas)
@@ -244,64 +660,139 @@ class RefResolver(object):
self.store.update(store)
self.store[base_uri] = referrer
+ self._urljoin_cache = urljoin_cache
+ self._remote_cache = remote_cache
+
@classmethod
- def from_schema(cls, schema, *args, **kwargs):
+ def from_schema(cls, schema, id_of=_id_of, *args, **kwargs):
"""
Construct a resolver from a JSON schema object.
- :argument schema schema: the referring schema
- :rtype: :class:`RefResolver`
+ Arguments:
+
+ schema:
+
+ the referring schema
+
+ Returns:
+
+ `RefResolver`
+ """
+
+ return cls(base_uri=id_of(schema), referrer=schema, *args, **kwargs)
+
+ def push_scope(self, scope):
+ """
+ Enter a given sub-scope.
+
+ Treats further dereferences as being performed underneath the
+ given scope.
+ """
+ self._scopes_stack.append(
+ self._urljoin_cache(self.resolution_scope, scope),
+ )
+ def pop_scope(self):
"""
+ Exit the most recent entered scope.
- return cls(schema.get(u"id", u""), schema, *args, **kwargs)
+ Treats further dereferences as being performed underneath the
+ original scope.
+
+ Don't call this method more times than `push_scope` has been
+ called.
+ """
+ try:
+ self._scopes_stack.pop()
+ except IndexError:
+ raise exceptions.RefResolutionError(
+ "Failed to pop the scope from an empty stack. "
+ "`pop_scope()` should only be called once for every "
+ "`push_scope()`"
+ )
+
+ @property
+ def resolution_scope(self):
+ """
+ Retrieve the current resolution scope.
+ """
+ return self._scopes_stack[-1]
+
+ @property
+ def base_uri(self):
+ """
+ Retrieve the current base URI, not including any fragment.
+ """
+ uri, _ = urldefrag(self.resolution_scope)
+ return uri
@contextlib.contextmanager
def in_scope(self, scope):
- old_scope = self.resolution_scope
- self.resolution_scope = urljoin(old_scope, scope)
+ """
+ Temporarily enter the given scope for the duration of the context.
+ """
+ self.push_scope(scope)
try:
yield
finally:
- self.resolution_scope = old_scope
+ self.pop_scope()
@contextlib.contextmanager
def resolving(self, ref):
"""
- Context manager which resolves a JSON ``ref`` and enters the
- resolution scope of this ref.
+ Resolve the given ``ref`` and enter its resolution scope.
+
+ Exits the scope on exit of this context manager.
+
+ Arguments:
- :argument str ref: reference to resolve
+ ref (str):
+ The reference to resolve
"""
- full_uri = urljoin(self.resolution_scope, ref)
- uri, fragment = urldefrag(full_uri)
- if not uri:
- uri = self.base_uri
+ url, resolved = self.resolve(ref)
+ self.push_scope(url)
+ try:
+ yield resolved
+ finally:
+ self.pop_scope()
- if uri in self.store:
- document = self.store[uri]
- else:
+ def resolve(self, ref):
+ """
+ Resolve the given reference.
+ """
+ url = self._urljoin_cache(self.resolution_scope, ref)
+ return url, self._remote_cache(url)
+
+ def resolve_from_url(self, url):
+ """
+ Resolve the given remote URL.
+ """
+ url, fragment = urldefrag(url)
+ try:
+ document = self.store[url]
+ except KeyError:
try:
- document = self.resolve_remote(uri)
+ document = self.resolve_remote(url)
except Exception as exc:
- raise RefResolutionError(exc)
+ raise exceptions.RefResolutionError(exc)
- old_base_uri, self.base_uri = self.base_uri, uri
- try:
- with self.in_scope(uri):
- yield self.resolve_fragment(document, fragment)
- finally:
- self.base_uri = old_base_uri
+ return self.resolve_fragment(document, fragment)
def resolve_fragment(self, document, fragment):
"""
Resolve a ``fragment`` within the referenced ``document``.
- :argument document: the referrant document
- :argument str fragment: a URI fragment to resolve within it
+ Arguments:
+
+ document:
+ The referent document
+
+ fragment (str):
+
+ a URI fragment to resolve within it
"""
fragment = fragment.lstrip(u"/")
@@ -319,7 +810,7 @@ class RefResolver(object):
try:
document = document[part]
except (TypeError, LookupError):
- raise RefResolutionError(
+ raise exceptions.RefResolutionError(
"Unresolvable JSON pointer: %r" % fragment
)
@@ -329,8 +820,9 @@ class RefResolver(object):
"""
Resolve a remote ``uri``.
- Does not check the store first, but stores the retrieved document in
- the store if :attr:`RefResolver.cache_remote` is True.
+ If called directly, does not check the store first, but after
+ retrieving the document at the specified URI it will be saved in
+ the store if :attr:`cache_remote` is True.
.. note::
@@ -341,85 +833,138 @@ class RefResolver(object):
If it isn't, or if the scheme of the ``uri`` is not ``http`` or
``https``, UTF-8 is assumed.
- :argument str uri: the URI to resolve
- :returns: the retrieved document
+ Arguments:
+
+ uri (str):
- .. _requests: http://pypi.python.org/pypi/requests/
+ The URI to resolve
+ Returns:
+
+ The retrieved document
+
+ .. _requests: https://pypi.org/project/requests/
"""
+ try:
+ import requests
+ except ImportError:
+ requests = None
scheme = urlsplit(uri).scheme
if scheme in self.handlers:
result = self.handlers[scheme](uri)
- elif (
- scheme in [u"http", u"https"] and
- requests and
- getattr(requests.Response, "json", None) is not None
- ):
+ elif scheme in [u"http", u"https"] and requests:
# Requests has support for detecting the correct encoding of
# json over http
- if callable(requests.Response.json):
- result = requests.get(uri).json()
- else:
- result = requests.get(uri).json
+ result = requests.get(uri).json()
else:
# Otherwise, pass off to urllib and assume utf-8
- result = json.loads(urlopen(uri).read().decode("utf-8"))
+ with urlopen(uri) as url:
+ result = json.loads(url.read().decode("utf-8"))
if self.cache_remote:
self.store[uri] = result
return result
-def validator_for(schema, default=_unset):
- if default is _unset:
- default = Draft4Validator
- return meta_schemas.get(schema.get(u"$schema", u""), default)
-
-
def validate(instance, schema, cls=None, *args, **kwargs):
"""
Validate an instance under the given schema.
- >>> validate([2, 3, 4], {"maxItems" : 2})
+ >>> validate([2, 3, 4], {"maxItems": 2})
Traceback (most recent call last):
...
ValidationError: [2, 3, 4] is too long
- :func:`validate` will first verify that the provided schema is itself
- valid, since not doing so can lead to less obvious error messages and fail
- in less obvious or consistent ways. If you know you have a valid schema
- already or don't care, you might prefer using the
- :meth:`~IValidator.validate` method directly on a specific validator
- (e.g. :meth:`Draft4Validator.validate`).
+ :func:`validate` will first verify that the provided schema is
+ itself valid, since not doing so can lead to less obvious error
+ messages and fail in less obvious or consistent ways.
+
+ If you know you have a valid schema already, especially if you
+ intend to validate multiple instances with the same schema, you
+ likely would prefer using the `IValidator.validate` method directly
+ on a specific validator (e.g. ``Draft7Validator.validate``).
+
+
+ Arguments:
+ instance:
- :argument instance: the instance to validate
- :argument schema: the schema to validate with
- :argument cls: an :class:`IValidator` class that will be used to validate
- the instance.
+ The instance to validate
- If the ``cls`` argument is not provided, two things will happen in
- accordance with the specification. First, if the schema has a
- :validator:`$schema` property containing a known meta-schema [#]_ then the
- proper validator will be used. The specification recommends that all
- schemas contain :validator:`$schema` properties for this reason. If no
- :validator:`$schema` property is found, the default validator class is
- :class:`Draft4Validator`.
+ schema:
- Any other provided positional and keyword arguments will be passed on when
- instantiating the ``cls``.
+ The schema to validate with
- :raises:
- :exc:`ValidationError` if the instance is invalid
+ cls (IValidator):
- :exc:`SchemaError` if the schema itself is invalid
+ The class that will be used to validate the instance.
+
+ If the ``cls`` argument is not provided, two things will happen
+ in accordance with the specification. First, if the schema has a
+ :validator:`$schema` property containing a known meta-schema [#]_
+ then the proper validator will be used. The specification recommends
+ that all schemas contain :validator:`$schema` properties for this
+ reason. If no :validator:`$schema` property is found, the default
+ validator class is the latest released draft.
+
+ Any other provided positional and keyword arguments will be passed
+ on when instantiating the ``cls``.
+
+ Raises:
+
+ `jsonschema.exceptions.ValidationError` if the instance
+ is invalid
+
+ `jsonschema.exceptions.SchemaError` if the schema itself
+ is invalid
.. rubric:: Footnotes
- .. [#] known by a validator registered with :func:`validates`
+ .. [#] known by a validator registered with
+ `jsonschema.validators.validates`
"""
if cls is None:
cls = validator_for(schema)
+
cls.check_schema(schema)
- cls(schema, *args, **kwargs).validate(instance)
+ validator = cls(schema, *args, **kwargs)
+ error = exceptions.best_match(validator.iter_errors(instance))
+ if error is not None:
+ raise error
+
+
+def validator_for(schema, default=_LATEST_VERSION):
+ """
+ Retrieve the validator class appropriate for validating the given schema.
+
+ Uses the :validator:`$schema` property that should be present in the
+ given schema to look up the appropriate validator class.
+
+ Arguments:
+
+ schema (collections.Mapping or bool):
+
+ the schema to look at
+
+ default:
+
+ the default to return if the appropriate validator class
+ cannot be determined.
+
+ If unprovided, the default is to return the latest supported
+ draft.
+ """
+ if schema is True or schema is False or u"$schema" not in schema:
+ return default
+ if schema[u"$schema"] not in meta_schemas:
+ warn(
+ (
+ "The metaschema specified by $schema was not found. "
+ "Using the latest draft to validate, but this will raise "
+ "an error in the future."
+ ),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return meta_schemas.get(schema[u"$schema"], _LATEST_VERSION)
diff --git a/lib/spack/external/markupsafe/AUTHORS b/lib/spack/external/markupsafe/AUTHORS
deleted file mode 100644
index f7e2942ecc..0000000000
--- a/lib/spack/external/markupsafe/AUTHORS
+++ /dev/null
@@ -1,13 +0,0 @@
-MarkupSafe is written and maintained by Armin Ronacher and
-various contributors:
-
-Development Lead
-````````````````
-
-- Armin Ronacher <armin.ronacher@active-4.com>
-
-Patches and Suggestions
-```````````````````````
-
-- Georg Brandl
-- Mickaël Guérin
diff --git a/lib/spack/external/markupsafe/LICENSE b/lib/spack/external/markupsafe/LICENSE
deleted file mode 100644
index 5d2693890d..0000000000
--- a/lib/spack/external/markupsafe/LICENSE
+++ /dev/null
@@ -1,33 +0,0 @@
-Copyright (c) 2010 by Armin Ronacher and contributors. See AUTHORS
-for more details.
-
-Some rights reserved.
-
-Redistribution and use in source and binary forms of the software as well
-as documentation, with or without modification, are permitted provided
-that the following conditions are met:
-
-* Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above
- copyright notice, this list of conditions and the following
- disclaimer in the documentation and/or other materials provided
- with the distribution.
-
-* The names of the contributors may not be used to endorse or
- promote products derived from this software without specific
- prior written permission.
-
-THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
-CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
-NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
-OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
-DAMAGE.
diff --git a/lib/spack/external/markupsafe/LICENSE.rst b/lib/spack/external/markupsafe/LICENSE.rst
new file mode 100644
index 0000000000..9d227a0cc4
--- /dev/null
+++ b/lib/spack/external/markupsafe/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2010 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/lib/spack/external/markupsafe/README.rst b/lib/spack/external/markupsafe/README.rst
index 360a0877a4..3548b8d1f7 100644
--- a/lib/spack/external/markupsafe/README.rst
+++ b/lib/spack/external/markupsafe/README.rst
@@ -1,113 +1,69 @@
MarkupSafe
==========
-Implements a unicode subclass that supports HTML strings:
+MarkupSafe implements a text object that escapes characters so it is
+safe to use in HTML and XML. Characters that have special meanings are
+replaced so that they display as the actual characters. This mitigates
+injection attacks, meaning untrusted user input can safely be displayed
+on a page.
-.. code-block:: python
- >>> from markupsafe import Markup, escape
- >>> escape("<script>alert(document.cookie);</script>")
- Markup(u'&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
- >>> tmpl = Markup("<em>%s</em>")
- >>> tmpl % "Peter > Lustig"
- Markup(u'<em>Peter &gt; Lustig</em>')
-
-If you want to make an object unicode that is not yet unicode
-but don't want to lose the taint information, you can use the
-``soft_unicode`` function. (On Python 3 you can also use ``soft_str`` which
-is a different name for the same function).
-
-.. code-block:: python
-
- >>> from markupsafe import soft_unicode
- >>> soft_unicode(42)
- u'42'
- >>> soft_unicode(Markup('foo'))
- Markup(u'foo')
-
-HTML Representations
---------------------
-
-Objects can customize their HTML markup equivalent by overriding
-the ``__html__`` function:
+Installing
+----------
-.. code-block:: python
+Install and update using `pip`_:
- >>> class Foo(object):
- ... def __html__(self):
- ... return '<strong>Nice</strong>'
- ...
- >>> escape(Foo())
- Markup(u'<strong>Nice</strong>')
- >>> Markup(Foo())
- Markup(u'<strong>Nice</strong>')
+.. code-block:: text
-Silent Escapes
---------------
+ pip install -U MarkupSafe
-Since MarkupSafe 0.10 there is now also a separate escape function
-called ``escape_silent`` that returns an empty string for ``None`` for
-consistency with other systems that return empty strings for ``None``
-when escaping (for instance Pylons' webhelpers).
+.. _pip: https://pip.pypa.io/en/stable/quickstart/
-If you also want to use this for the escape method of the Markup
-object, you can create your own subclass that does that:
-.. code-block:: python
+Examples
+--------
- from markupsafe import Markup, escape_silent as escape
+.. code-block:: pycon
- class SilentMarkup(Markup):
- __slots__ = ()
-
- @classmethod
- def escape(cls, s):
- return cls(escape(s))
-
-New-Style String Formatting
----------------------------
-
-Starting with MarkupSafe 0.21 new style string formats from Python 2.6 and
-3.x are now fully supported. Previously the escape behavior of those
-functions was spotty at best. The new implementations operates under the
-following algorithm:
-
-1. if an object has an ``__html_format__`` method it is called as
- replacement for ``__format__`` with the format specifier. It either
- has to return a string or markup object.
-2. if an object has an ``__html__`` method it is called.
-3. otherwise the default format system of Python kicks in and the result
- is HTML escaped.
-
-Here is how you can implement your own formatting:
+ >>> from markupsafe import Markup, escape
+ >>> # escape replaces special characters and wraps in Markup
+ >>> escape('<script>alert(document.cookie);</script>')
+ Markup(u'&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
+ >>> # wrap in Markup to mark text "safe" and prevent escaping
+ >>> Markup('<strong>Hello</strong>')
+ Markup('<strong>hello</strong>')
+ >>> escape(Markup('<strong>Hello</strong>'))
+ Markup('<strong>hello</strong>')
+ >>> # Markup is a text subclass (str on Python 3, unicode on Python 2)
+ >>> # methods and operators escape their arguments
+ >>> template = Markup("Hello <em>%s</em>")
+ >>> template % '"World"'
+ Markup('Hello <em>&#34;World&#34;</em>')
-.. code-block:: python
- class User(object):
+Donate
+------
- def __init__(self, id, username):
- self.id = id
- self.username = username
+The Pallets organization develops and supports MarkupSafe and other
+libraries that use it. In order to grow the community of contributors
+and users, and allow the maintainers to devote more time to the
+projects, `please donate today`_.
- def __html_format__(self, format_spec):
- if format_spec == 'link':
- return Markup('<a href="/user/{0}">{1}</a>').format(
- self.id,
- self.__html__(),
- )
- elif format_spec:
- raise ValueError('Invalid format spec')
- return self.__html__()
+.. _please donate today: https://palletsprojects.com/donate
- def __html__(self):
- return Markup('<span class=user>{0}</span>').format(self.username)
-And to format that user:
+Links
+-----
-.. code-block:: python
+* Website: https://palletsprojects.com/p/markupsafe/
+* Documentation: https://markupsafe.palletsprojects.com/
+* License: `BSD-3-Clause <https://github.com/pallets/markupsafe/blob/master/LICENSE.rst>`_
+* Releases: https://pypi.org/project/MarkupSafe/
+* Code: https://github.com/pallets/markupsafe
+* Issue tracker: https://github.com/pallets/markupsafe/issues
+* Test status:
- >>> user = User(1, 'foo')
- >>> Markup('<p>User: {0:link}').format(user)
- Markup(u'<p>User: <a href="/user/1"><span class=user>foo</span></a>')
+ * Linux, Mac: https://travis-ci.org/pallets/markupsafe
+ * Windows: https://ci.appveyor.com/project/pallets/markupsafe
-Markupsafe supports Python 2.6, 2.7 and Python 3.3 and higher.
+* Test coverage: https://codecov.io/gh/pallets/markupsafe
diff --git a/lib/spack/external/markupsafe/__init__.py b/lib/spack/external/markupsafe/__init__.py
index 506326f450..da05ed328a 100644
--- a/lib/spack/external/markupsafe/__init__.py
+++ b/lib/spack/external/markupsafe/__init__.py
@@ -1,80 +1,74 @@
# -*- coding: utf-8 -*-
"""
- markupsafe
- ~~~~~~~~~~
+markupsafe
+~~~~~~~~~~
- Implements a Markup string.
+Implements an escape function and a Markup string to replace HTML
+special characters with safe representations.
- :copyright: (c) 2010 by Armin Ronacher.
- :license: BSD, see LICENSE for more details.
+:copyright: 2010 Pallets
+:license: BSD-3-Clause
"""
import re
import string
-import sys
-from markupsafe._compat import text_type, string_types, int_types, \
- unichr, iteritems, PY2
-if sys.version_info >= (3, 3):
- from collections.abc import Mapping
-else:
- from collections import Mapping
+from ._compat import int_types
+from ._compat import iteritems
+from ._compat import Mapping
+from ._compat import PY2
+from ._compat import string_types
+from ._compat import text_type
+from ._compat import unichr
-__version__ = "1.0"
+__version__ = "1.1.1"
-__all__ = ['Markup', 'soft_unicode', 'escape', 'escape_silent']
+__all__ = ["Markup", "soft_unicode", "escape", "escape_silent"]
-
-_striptags_re = re.compile(r'(<!--.*?-->|<[^>]*>)')
-_entity_re = re.compile(r'&([^& ;]+);')
+_striptags_re = re.compile(r"(<!--.*?-->|<[^>]*>)")
+_entity_re = re.compile(r"&([^& ;]+);")
class Markup(text_type):
- r"""Marks a string as being safe for inclusion in HTML/XML output without
- needing to be escaped. This implements the `__html__` interface a couple
- of frameworks and web applications use. :class:`Markup` is a direct
- subclass of `unicode` and provides all the methods of `unicode` just that
- it escapes arguments passed and always returns `Markup`.
-
- The `escape` function returns markup objects so that double escaping can't
- happen.
-
- The constructor of the :class:`Markup` class can be used for three
- different things: When passed an unicode object it's assumed to be safe,
- when passed an object with an HTML representation (has an `__html__`
- method) that representation is used, otherwise the object passed is
- converted into a unicode string and then assumed to be safe:
-
- >>> Markup("Hello <em>World</em>!")
- Markup(u'Hello <em>World</em>!')
- >>> class Foo(object):
- ... def __html__(self):
- ... return '<a href="#">foo</a>'
+ """A string that is ready to be safely inserted into an HTML or XML
+ document, either because it was escaped or because it was marked
+ safe.
+
+ Passing an object to the constructor converts it to text and wraps
+ it to mark it safe without escaping. To escape the text, use the
+ :meth:`escape` class method instead.
+
+ >>> Markup('Hello, <em>World</em>!')
+ Markup('Hello, <em>World</em>!')
+ >>> Markup(42)
+ Markup('42')
+ >>> Markup.escape('Hello, <em>World</em>!')
+ Markup('Hello &lt;em&gt;World&lt;/em&gt;!')
+
+ This implements the ``__html__()`` interface that some frameworks
+ use. Passing an object that implements ``__html__()`` will wrap the
+ output of that method, marking it safe.
+
+ >>> class Foo:
+ ... def __html__(self):
+ ... return '<a href="/foo">foo</a>'
...
>>> Markup(Foo())
- Markup(u'<a href="#">foo</a>')
-
- If you want object passed being always treated as unsafe you can use the
- :meth:`escape` classmethod to create a :class:`Markup` object:
+ Markup('<a href="/foo">foo</a>')
- >>> Markup.escape("Hello <em>World</em>!")
- Markup(u'Hello &lt;em&gt;World&lt;/em&gt;!')
+ This is a subclass of the text type (``str`` in Python 3,
+ ``unicode`` in Python 2). It has the same methods as that type, but
+ all methods escape their arguments and return a ``Markup`` instance.
- Operations on a markup string are markup aware which means that all
- arguments are passed through the :func:`escape` function:
-
- >>> em = Markup("<em>%s</em>")
- >>> em % "foo & bar"
- Markup(u'<em>foo &amp; bar</em>')
- >>> strong = Markup("<strong>%(text)s</strong>")
- >>> strong % {'text': '<blink>hacker here</blink>'}
- Markup(u'<strong>&lt;blink&gt;hacker here&lt;/blink&gt;</strong>')
- >>> Markup("<em>Hello</em> ") + "<foo>"
- Markup(u'<em>Hello</em> &lt;foo&gt;')
+ >>> Markup('<em>%s</em>') % 'foo & bar'
+ Markup('<em>foo &amp; bar</em>')
+ >>> Markup('<em>Hello</em> ') + '<foo>'
+ Markup('<em>Hello</em> &lt;foo&gt;')
"""
+
__slots__ = ()
- def __new__(cls, base=u'', encoding=None, errors='strict'):
- if hasattr(base, '__html__'):
+ def __new__(cls, base=u"", encoding=None, errors="strict"):
+ if hasattr(base, "__html__"):
base = base.__html__()
if encoding is None:
return text_type.__new__(cls, base)
@@ -84,12 +78,12 @@ class Markup(text_type):
return self
def __add__(self, other):
- if isinstance(other, string_types) or hasattr(other, '__html__'):
+ if isinstance(other, string_types) or hasattr(other, "__html__"):
return self.__class__(super(Markup, self).__add__(self.escape(other)))
return NotImplemented
def __radd__(self, other):
- if hasattr(other, '__html__') or isinstance(other, string_types):
+ if hasattr(other, "__html__") or isinstance(other, string_types):
return self.escape(other).__add__(self)
return NotImplemented
@@ -97,6 +91,7 @@ class Markup(text_type):
if isinstance(num, int_types):
return self.__class__(text_type.__mul__(self, num))
return NotImplemented
+
__rmul__ = __mul__
def __mod__(self, arg):
@@ -107,115 +102,124 @@ class Markup(text_type):
return self.__class__(text_type.__mod__(self, arg))
def __repr__(self):
- return '%s(%s)' % (
- self.__class__.__name__,
- text_type.__repr__(self)
- )
+ return "%s(%s)" % (self.__class__.__name__, text_type.__repr__(self))
def join(self, seq):
return self.__class__(text_type.join(self, map(self.escape, seq)))
+
join.__doc__ = text_type.join.__doc__
def split(self, *args, **kwargs):
return list(map(self.__class__, text_type.split(self, *args, **kwargs)))
+
split.__doc__ = text_type.split.__doc__
def rsplit(self, *args, **kwargs):
return list(map(self.__class__, text_type.rsplit(self, *args, **kwargs)))
+
rsplit.__doc__ = text_type.rsplit.__doc__
def splitlines(self, *args, **kwargs):
- return list(map(self.__class__, text_type.splitlines(
- self, *args, **kwargs)))
+ return list(map(self.__class__, text_type.splitlines(self, *args, **kwargs)))
+
splitlines.__doc__ = text_type.splitlines.__doc__
def unescape(self):
- r"""Unescape markup again into an text_type string. This also resolves
- known HTML4 and XHTML entities:
+ """Convert escaped markup back into a text string. This replaces
+ HTML entities with the characters they represent.
- >>> Markup("Main &raquo; <em>About</em>").unescape()
- u'Main \xbb <em>About</em>'
+ >>> Markup('Main &raquo; <em>About</em>').unescape()
+ 'Main » <em>About</em>'
"""
- from markupsafe._constants import HTML_ENTITIES
+ from ._constants import HTML_ENTITIES
+
def handle_match(m):
name = m.group(1)
if name in HTML_ENTITIES:
return unichr(HTML_ENTITIES[name])
try:
- if name[:2] in ('#x', '#X'):
+ if name[:2] in ("#x", "#X"):
return unichr(int(name[2:], 16))
- elif name.startswith('#'):
+ elif name.startswith("#"):
return unichr(int(name[1:]))
except ValueError:
pass
# Don't modify unexpected input.
return m.group()
+
return _entity_re.sub(handle_match, text_type(self))
def striptags(self):
- r"""Unescape markup into an text_type string and strip all tags. This
- also resolves known HTML4 and XHTML entities. Whitespace is
- normalized to one:
+ """:meth:`unescape` the markup, remove tags, and normalize
+ whitespace to single spaces.
- >>> Markup("Main &raquo; <em>About</em>").striptags()
- u'Main \xbb About'
+ >>> Markup('Main &raquo;\t<em>About</em>').striptags()
+ 'Main » About'
"""
- stripped = u' '.join(_striptags_re.sub('', self).split())
+ stripped = u" ".join(_striptags_re.sub("", self).split())
return Markup(stripped).unescape()
@classmethod
def escape(cls, s):
- """Escape the string. Works like :func:`escape` with the difference
- that for subclasses of :class:`Markup` this function would return the
- correct subclass.
+ """Escape a string. Calls :func:`escape` and ensures that for
+ subclasses the correct type is returned.
"""
rv = escape(s)
if rv.__class__ is not cls:
return cls(rv)
return rv
- def make_simple_escaping_wrapper(name):
+ def make_simple_escaping_wrapper(name): # noqa: B902
orig = getattr(text_type, name)
+
def func(self, *args, **kwargs):
args = _escape_argspec(list(args), enumerate(args), self.escape)
_escape_argspec(kwargs, iteritems(kwargs), self.escape)
return self.__class__(orig(self, *args, **kwargs))
+
func.__name__ = orig.__name__
func.__doc__ = orig.__doc__
return func
- for method in '__getitem__', 'capitalize', \
- 'title', 'lower', 'upper', 'replace', 'ljust', \
- 'rjust', 'lstrip', 'rstrip', 'center', 'strip', \
- 'translate', 'expandtabs', 'swapcase', 'zfill':
+ for method in (
+ "__getitem__",
+ "capitalize",
+ "title",
+ "lower",
+ "upper",
+ "replace",
+ "ljust",
+ "rjust",
+ "lstrip",
+ "rstrip",
+ "center",
+ "strip",
+ "translate",
+ "expandtabs",
+ "swapcase",
+ "zfill",
+ ):
locals()[method] = make_simple_escaping_wrapper(method)
- # new in python 2.5
- if hasattr(text_type, 'partition'):
- def partition(self, sep):
- return tuple(map(self.__class__,
- text_type.partition(self, self.escape(sep))))
- def rpartition(self, sep):
- return tuple(map(self.__class__,
- text_type.rpartition(self, self.escape(sep))))
-
- # new in python 2.6
- if hasattr(text_type, 'format'):
- def format(*args, **kwargs):
- self, args = args[0], args[1:]
- formatter = EscapeFormatter(self.escape)
- kwargs = _MagicFormatMapping(args, kwargs)
- return self.__class__(formatter.vformat(self, args, kwargs))
-
- def __html_format__(self, format_spec):
- if format_spec:
- raise ValueError('Unsupported format specification '
- 'for Markup.')
- return self
+ def partition(self, sep):
+ return tuple(map(self.__class__, text_type.partition(self, self.escape(sep))))
+
+ def rpartition(self, sep):
+ return tuple(map(self.__class__, text_type.rpartition(self, self.escape(sep))))
+
+ def format(self, *args, **kwargs):
+ formatter = EscapeFormatter(self.escape)
+ kwargs = _MagicFormatMapping(args, kwargs)
+ return self.__class__(formatter.vformat(self, args, kwargs))
+
+ def __html_format__(self, format_spec):
+ if format_spec:
+ raise ValueError("Unsupported format specification " "for Markup.")
+ return self
# not in python 3
- if hasattr(text_type, '__getslice__'):
- __getslice__ = make_simple_escaping_wrapper('__getslice__')
+ if hasattr(text_type, "__getslice__"):
+ __getslice__ = make_simple_escaping_wrapper("__getslice__")
del method, make_simple_escaping_wrapper
@@ -234,7 +238,7 @@ class _MagicFormatMapping(Mapping):
self._last_index = 0
def __getitem__(self, key):
- if key == '':
+ if key == "":
idx = self._last_index
self._last_index += 1
try:
@@ -251,35 +255,37 @@ class _MagicFormatMapping(Mapping):
return len(self._kwargs)
-if hasattr(text_type, 'format'):
- class EscapeFormatter(string.Formatter):
+if hasattr(text_type, "format"):
+ class EscapeFormatter(string.Formatter):
def __init__(self, escape):
self.escape = escape
def format_field(self, value, format_spec):
- if hasattr(value, '__html_format__'):
+ if hasattr(value, "__html_format__"):
rv = value.__html_format__(format_spec)
- elif hasattr(value, '__html__'):
+ elif hasattr(value, "__html__"):
if format_spec:
- raise ValueError('No format specification allowed '
- 'when formatting an object with '
- 'its __html__ method.')
+ raise ValueError(
+ "Format specifier {0} given, but {1} does not"
+ " define __html_format__. A class that defines"
+ " __html__ must define __html_format__ to work"
+ " with format specifiers.".format(format_spec, type(value))
+ )
rv = value.__html__()
else:
# We need to make sure the format spec is unicode here as
# otherwise the wrong callback methods are invoked. For
# instance a byte string there would invoke __str__ and
# not __unicode__.
- rv = string.Formatter.format_field(
- self, value, text_type(format_spec))
+ rv = string.Formatter.format_field(self, value, text_type(format_spec))
return text_type(self.escape(rv))
def _escape_argspec(obj, iterable, escape):
"""Helper for various string-wrapped functions."""
for key, value in iterable:
- if hasattr(value, '__html__') or isinstance(value, string_types):
+ if hasattr(value, "__html__") or isinstance(value, string_types):
obj[key] = escape(value)
return obj
@@ -291,20 +297,31 @@ class _MarkupEscapeHelper(object):
self.obj = obj
self.escape = escape
- __getitem__ = lambda s, x: _MarkupEscapeHelper(s.obj[x], s.escape)
- __unicode__ = __str__ = lambda s: text_type(s.escape(s.obj))
- __repr__ = lambda s: str(s.escape(repr(s.obj)))
- __int__ = lambda s: int(s.obj)
- __float__ = lambda s: float(s.obj)
+ def __getitem__(self, item):
+ return _MarkupEscapeHelper(self.obj[item], self.escape)
+
+ def __str__(self):
+ return text_type(self.escape(self.obj))
+
+ __unicode__ = __str__
+
+ def __repr__(self):
+ return str(self.escape(repr(self.obj)))
+
+ def __int__(self):
+ return int(self.obj)
+
+ def __float__(self):
+ return float(self.obj)
# we have to import it down here as the speedups and native
# modules imports the markup type which is define above.
try:
- from markupsafe._speedups import escape, escape_silent, soft_unicode
+ from ._speedups import escape, escape_silent, soft_unicode
except ImportError:
- from markupsafe._native import escape, escape_silent, soft_unicode
+ from ._native import escape, escape_silent, soft_unicode
if not PY2:
soft_str = soft_unicode
- __all__.append('soft_str')
+ __all__.append("soft_str")
diff --git a/lib/spack/external/markupsafe/_compat.py b/lib/spack/external/markupsafe/_compat.py
index 62e5632ad8..bc05090f9e 100644
--- a/lib/spack/external/markupsafe/_compat.py
+++ b/lib/spack/external/markupsafe/_compat.py
@@ -1,12 +1,10 @@
# -*- coding: utf-8 -*-
"""
- markupsafe._compat
- ~~~~~~~~~~~~~~~~~~
+markupsafe._compat
+~~~~~~~~~~~~~~~~~~
- Compatibility module for different Python versions.
-
- :copyright: (c) 2013 by Armin Ronacher.
- :license: BSD, see LICENSE for more details.
+:copyright: 2010 Pallets
+:license: BSD-3-Clause
"""
import sys
@@ -17,10 +15,19 @@ if not PY2:
string_types = (str,)
unichr = chr
int_types = (int,)
- iteritems = lambda x: iter(x.items())
+
+ def iteritems(x):
+ return iter(x.items())
+
+ from collections.abc import Mapping
+
else:
text_type = unicode
string_types = (str, unicode)
unichr = unichr
int_types = (int, long)
- iteritems = lambda x: x.iteritems()
+
+ def iteritems(x):
+ return x.iteritems()
+
+ from collections import Mapping
diff --git a/lib/spack/external/markupsafe/_constants.py b/lib/spack/external/markupsafe/_constants.py
index 919bf03c50..7c57c2d294 100644
--- a/lib/spack/external/markupsafe/_constants.py
+++ b/lib/spack/external/markupsafe/_constants.py
@@ -1,267 +1,264 @@
# -*- coding: utf-8 -*-
"""
- markupsafe._constants
- ~~~~~~~~~~~~~~~~~~~~~
+markupsafe._constants
+~~~~~~~~~~~~~~~~~~~~~
- Highlevel implementation of the Markup string.
-
- :copyright: (c) 2010 by Armin Ronacher.
- :license: BSD, see LICENSE for more details.
+:copyright: 2010 Pallets
+:license: BSD-3-Clause
"""
-
HTML_ENTITIES = {
- 'AElig': 198,
- 'Aacute': 193,
- 'Acirc': 194,
- 'Agrave': 192,
- 'Alpha': 913,
- 'Aring': 197,
- 'Atilde': 195,
- 'Auml': 196,
- 'Beta': 914,
- 'Ccedil': 199,
- 'Chi': 935,
- 'Dagger': 8225,
- 'Delta': 916,
- 'ETH': 208,
- 'Eacute': 201,
- 'Ecirc': 202,
- 'Egrave': 200,
- 'Epsilon': 917,
- 'Eta': 919,
- 'Euml': 203,
- 'Gamma': 915,
- 'Iacute': 205,
- 'Icirc': 206,
- 'Igrave': 204,
- 'Iota': 921,
- 'Iuml': 207,
- 'Kappa': 922,
- 'Lambda': 923,
- 'Mu': 924,
- 'Ntilde': 209,
- 'Nu': 925,
- 'OElig': 338,
- 'Oacute': 211,
- 'Ocirc': 212,
- 'Ograve': 210,
- 'Omega': 937,
- 'Omicron': 927,
- 'Oslash': 216,
- 'Otilde': 213,
- 'Ouml': 214,
- 'Phi': 934,
- 'Pi': 928,
- 'Prime': 8243,
- 'Psi': 936,
- 'Rho': 929,
- 'Scaron': 352,
- 'Sigma': 931,
- 'THORN': 222,
- 'Tau': 932,
- 'Theta': 920,
- 'Uacute': 218,
- 'Ucirc': 219,
- 'Ugrave': 217,
- 'Upsilon': 933,
- 'Uuml': 220,
- 'Xi': 926,
- 'Yacute': 221,
- 'Yuml': 376,
- 'Zeta': 918,
- 'aacute': 225,
- 'acirc': 226,
- 'acute': 180,
- 'aelig': 230,
- 'agrave': 224,
- 'alefsym': 8501,
- 'alpha': 945,
- 'amp': 38,
- 'and': 8743,
- 'ang': 8736,
- 'apos': 39,
- 'aring': 229,
- 'asymp': 8776,
- 'atilde': 227,
- 'auml': 228,
- 'bdquo': 8222,
- 'beta': 946,
- 'brvbar': 166,
- 'bull': 8226,
- 'cap': 8745,
- 'ccedil': 231,
- 'cedil': 184,
- 'cent': 162,
- 'chi': 967,
- 'circ': 710,
- 'clubs': 9827,
- 'cong': 8773,
- 'copy': 169,
- 'crarr': 8629,
- 'cup': 8746,
- 'curren': 164,
- 'dArr': 8659,
- 'dagger': 8224,
- 'darr': 8595,
- 'deg': 176,
- 'delta': 948,
- 'diams': 9830,
- 'divide': 247,
- 'eacute': 233,
- 'ecirc': 234,
- 'egrave': 232,
- 'empty': 8709,
- 'emsp': 8195,
- 'ensp': 8194,
- 'epsilon': 949,
- 'equiv': 8801,
- 'eta': 951,
- 'eth': 240,
- 'euml': 235,
- 'euro': 8364,
- 'exist': 8707,
- 'fnof': 402,
- 'forall': 8704,
- 'frac12': 189,
- 'frac14': 188,
- 'frac34': 190,
- 'frasl': 8260,
- 'gamma': 947,
- 'ge': 8805,
- 'gt': 62,
- 'hArr': 8660,
- 'harr': 8596,
- 'hearts': 9829,
- 'hellip': 8230,
- 'iacute': 237,
- 'icirc': 238,
- 'iexcl': 161,
- 'igrave': 236,
- 'image': 8465,
- 'infin': 8734,
- 'int': 8747,
- 'iota': 953,
- 'iquest': 191,
- 'isin': 8712,
- 'iuml': 239,
- 'kappa': 954,
- 'lArr': 8656,
- 'lambda': 955,
- 'lang': 9001,
- 'laquo': 171,
- 'larr': 8592,
- 'lceil': 8968,
- 'ldquo': 8220,
- 'le': 8804,
- 'lfloor': 8970,
- 'lowast': 8727,
- 'loz': 9674,
- 'lrm': 8206,
- 'lsaquo': 8249,
- 'lsquo': 8216,
- 'lt': 60,
- 'macr': 175,
- 'mdash': 8212,
- 'micro': 181,
- 'middot': 183,
- 'minus': 8722,
- 'mu': 956,
- 'nabla': 8711,
- 'nbsp': 160,
- 'ndash': 8211,
- 'ne': 8800,
- 'ni': 8715,
- 'not': 172,
- 'notin': 8713,
- 'nsub': 8836,
- 'ntilde': 241,
- 'nu': 957,
- 'oacute': 243,
- 'ocirc': 244,
- 'oelig': 339,
- 'ograve': 242,
- 'oline': 8254,
- 'omega': 969,
- 'omicron': 959,
- 'oplus': 8853,
- 'or': 8744,
- 'ordf': 170,
- 'ordm': 186,
- 'oslash': 248,
- 'otilde': 245,
- 'otimes': 8855,
- 'ouml': 246,
- 'para': 182,
- 'part': 8706,
- 'permil': 8240,
- 'perp': 8869,
- 'phi': 966,
- 'pi': 960,
- 'piv': 982,
- 'plusmn': 177,
- 'pound': 163,
- 'prime': 8242,
- 'prod': 8719,
- 'prop': 8733,
- 'psi': 968,
- 'quot': 34,
- 'rArr': 8658,
- 'radic': 8730,
- 'rang': 9002,
- 'raquo': 187,
- 'rarr': 8594,
- 'rceil': 8969,
- 'rdquo': 8221,
- 'real': 8476,
- 'reg': 174,
- 'rfloor': 8971,
- 'rho': 961,
- 'rlm': 8207,
- 'rsaquo': 8250,
- 'rsquo': 8217,
- 'sbquo': 8218,
- 'scaron': 353,
- 'sdot': 8901,
- 'sect': 167,
- 'shy': 173,
- 'sigma': 963,
- 'sigmaf': 962,
- 'sim': 8764,
- 'spades': 9824,
- 'sub': 8834,
- 'sube': 8838,
- 'sum': 8721,
- 'sup': 8835,
- 'sup1': 185,
- 'sup2': 178,
- 'sup3': 179,
- 'supe': 8839,
- 'szlig': 223,
- 'tau': 964,
- 'there4': 8756,
- 'theta': 952,
- 'thetasym': 977,
- 'thinsp': 8201,
- 'thorn': 254,
- 'tilde': 732,
- 'times': 215,
- 'trade': 8482,
- 'uArr': 8657,
- 'uacute': 250,
- 'uarr': 8593,
- 'ucirc': 251,
- 'ugrave': 249,
- 'uml': 168,
- 'upsih': 978,
- 'upsilon': 965,
- 'uuml': 252,
- 'weierp': 8472,
- 'xi': 958,
- 'yacute': 253,
- 'yen': 165,
- 'yuml': 255,
- 'zeta': 950,
- 'zwj': 8205,
- 'zwnj': 8204
+ "AElig": 198,
+ "Aacute": 193,
+ "Acirc": 194,
+ "Agrave": 192,
+ "Alpha": 913,
+ "Aring": 197,
+ "Atilde": 195,
+ "Auml": 196,
+ "Beta": 914,
+ "Ccedil": 199,
+ "Chi": 935,
+ "Dagger": 8225,
+ "Delta": 916,
+ "ETH": 208,
+ "Eacute": 201,
+ "Ecirc": 202,
+ "Egrave": 200,
+ "Epsilon": 917,
+ "Eta": 919,
+ "Euml": 203,
+ "Gamma": 915,
+ "Iacute": 205,
+ "Icirc": 206,
+ "Igrave": 204,
+ "Iota": 921,
+ "Iuml": 207,
+ "Kappa": 922,
+ "Lambda": 923,
+ "Mu": 924,
+ "Ntilde": 209,
+ "Nu": 925,
+ "OElig": 338,
+ "Oacute": 211,
+ "Ocirc": 212,
+ "Ograve": 210,
+ "Omega": 937,
+ "Omicron": 927,
+ "Oslash": 216,
+ "Otilde": 213,
+ "Ouml": 214,
+ "Phi": 934,
+ "Pi": 928,
+ "Prime": 8243,
+ "Psi": 936,
+ "Rho": 929,
+ "Scaron": 352,
+ "Sigma": 931,
+ "THORN": 222,
+ "Tau": 932,
+ "Theta": 920,
+ "Uacute": 218,
+ "Ucirc": 219,
+ "Ugrave": 217,
+ "Upsilon": 933,
+ "Uuml": 220,
+ "Xi": 926,
+ "Yacute": 221,
+ "Yuml": 376,
+ "Zeta": 918,
+ "aacute": 225,
+ "acirc": 226,
+ "acute": 180,
+ "aelig": 230,
+ "agrave": 224,
+ "alefsym": 8501,
+ "alpha": 945,
+ "amp": 38,
+ "and": 8743,
+ "ang": 8736,
+ "apos": 39,
+ "aring": 229,
+ "asymp": 8776,
+ "atilde": 227,
+ "auml": 228,
+ "bdquo": 8222,
+ "beta": 946,
+ "brvbar": 166,
+ "bull": 8226,
+ "cap": 8745,
+ "ccedil": 231,
+ "cedil": 184,
+ "cent": 162,
+ "chi": 967,
+ "circ": 710,
+ "clubs": 9827,
+ "cong": 8773,
+ "copy": 169,
+ "crarr": 8629,
+ "cup": 8746,
+ "curren": 164,
+ "dArr": 8659,
+ "dagger": 8224,
+ "darr": 8595,
+ "deg": 176,
+ "delta": 948,
+ "diams": 9830,
+ "divide": 247,
+ "eacute": 233,
+ "ecirc": 234,
+ "egrave": 232,
+ "empty": 8709,
+ "emsp": 8195,
+ "ensp": 8194,
+ "epsilon": 949,
+ "equiv": 8801,
+ "eta": 951,
+ "eth": 240,
+ "euml": 235,
+ "euro": 8364,
+ "exist": 8707,
+ "fnof": 402,
+ "forall": 8704,
+ "frac12": 189,
+ "frac14": 188,
+ "frac34": 190,
+ "frasl": 8260,
+ "gamma": 947,
+ "ge": 8805,
+ "gt": 62,
+ "hArr": 8660,
+ "harr": 8596,
+ "hearts": 9829,
+ "hellip": 8230,
+ "iacute": 237,
+ "icirc": 238,
+ "iexcl": 161,
+ "igrave": 236,
+ "image": 8465,
+ "infin": 8734,
+ "int": 8747,
+ "iota": 953,
+ "iquest": 191,
+ "isin": 8712,
+ "iuml": 239,
+ "kappa": 954,
+ "lArr": 8656,
+ "lambda": 955,
+ "lang": 9001,
+ "laquo": 171,
+ "larr": 8592,
+ "lceil": 8968,
+ "ldquo": 8220,
+ "le": 8804,
+ "lfloor": 8970,
+ "lowast": 8727,
+ "loz": 9674,
+ "lrm": 8206,
+ "lsaquo": 8249,
+ "lsquo": 8216,
+ "lt": 60,
+ "macr": 175,
+ "mdash": 8212,
+ "micro": 181,
+ "middot": 183,
+ "minus": 8722,
+ "mu": 956,
+ "nabla": 8711,
+ "nbsp": 160,
+ "ndash": 8211,
+ "ne": 8800,
+ "ni": 8715,
+ "not": 172,
+ "notin": 8713,
+ "nsub": 8836,
+ "ntilde": 241,
+ "nu": 957,
+ "oacute": 243,
+ "ocirc": 244,
+ "oelig": 339,
+ "ograve": 242,
+ "oline": 8254,
+ "omega": 969,
+ "omicron": 959,
+ "oplus": 8853,
+ "or": 8744,
+ "ordf": 170,
+ "ordm": 186,
+ "oslash": 248,
+ "otilde": 245,
+ "otimes": 8855,
+ "ouml": 246,
+ "para": 182,
+ "part": 8706,
+ "permil": 8240,
+ "perp": 8869,
+ "phi": 966,
+ "pi": 960,
+ "piv": 982,
+ "plusmn": 177,
+ "pound": 163,
+ "prime": 8242,
+ "prod": 8719,
+ "prop": 8733,
+ "psi": 968,
+ "quot": 34,
+ "rArr": 8658,
+ "radic": 8730,
+ "rang": 9002,
+ "raquo": 187,
+ "rarr": 8594,
+ "rceil": 8969,
+ "rdquo": 8221,
+ "real": 8476,
+ "reg": 174,
+ "rfloor": 8971,
+ "rho": 961,
+ "rlm": 8207,
+ "rsaquo": 8250,
+ "rsquo": 8217,
+ "sbquo": 8218,
+ "scaron": 353,
+ "sdot": 8901,
+ "sect": 167,
+ "shy": 173,
+ "sigma": 963,
+ "sigmaf": 962,
+ "sim": 8764,
+ "spades": 9824,
+ "sub": 8834,
+ "sube": 8838,
+ "sum": 8721,
+ "sup": 8835,
+ "sup1": 185,
+ "sup2": 178,
+ "sup3": 179,
+ "supe": 8839,
+ "szlig": 223,
+ "tau": 964,
+ "there4": 8756,
+ "theta": 952,
+ "thetasym": 977,
+ "thinsp": 8201,
+ "thorn": 254,
+ "tilde": 732,
+ "times": 215,
+ "trade": 8482,
+ "uArr": 8657,
+ "uacute": 250,
+ "uarr": 8593,
+ "ucirc": 251,
+ "ugrave": 249,
+ "uml": 168,
+ "upsih": 978,
+ "upsilon": 965,
+ "uuml": 252,
+ "weierp": 8472,
+ "xi": 958,
+ "yacute": 253,
+ "yen": 165,
+ "yuml": 255,
+ "zeta": 950,
+ "zwj": 8205,
+ "zwnj": 8204,
}
diff --git a/lib/spack/external/markupsafe/_native.py b/lib/spack/external/markupsafe/_native.py
index 5e83f10a11..cd08752cd8 100644
--- a/lib/spack/external/markupsafe/_native.py
+++ b/lib/spack/external/markupsafe/_native.py
@@ -1,36 +1,49 @@
# -*- coding: utf-8 -*-
"""
- markupsafe._native
- ~~~~~~~~~~~~~~~~~~
+markupsafe._native
+~~~~~~~~~~~~~~~~~~
- Native Python implementation the C module is not compiled.
+Native Python implementation used when the C module is not compiled.
- :copyright: (c) 2010 by Armin Ronacher.
- :license: BSD, see LICENSE for more details.
+:copyright: 2010 Pallets
+:license: BSD-3-Clause
"""
-from markupsafe import Markup
-from markupsafe._compat import text_type
+from . import Markup
+from ._compat import text_type
def escape(s):
- """Convert the characters &, <, >, ' and " in string s to HTML-safe
- sequences. Use this if you need to display text that might contain
- such characters in HTML. Marks return value as markup string.
+ """Replace the characters ``&``, ``<``, ``>``, ``'``, and ``"`` in
+ the string with HTML-safe sequences. Use this if you need to display
+ text that might contain such characters in HTML.
+
+ If the object has an ``__html__`` method, it is called and the
+ return value is assumed to already be safe for HTML.
+
+ :param s: An object to be converted to a string and escaped.
+ :return: A :class:`Markup` string with the escaped text.
"""
- if hasattr(s, '__html__'):
- return s.__html__()
- return Markup(text_type(s)
- .replace('&', '&amp;')
- .replace('>', '&gt;')
- .replace('<', '&lt;')
- .replace("'", '&#39;')
- .replace('"', '&#34;')
+ if hasattr(s, "__html__"):
+ return Markup(s.__html__())
+ return Markup(
+ text_type(s)
+ .replace("&", "&amp;")
+ .replace(">", "&gt;")
+ .replace("<", "&lt;")
+ .replace("'", "&#39;")
+ .replace('"', "&#34;")
)
def escape_silent(s):
- """Like :func:`escape` but converts `None` into an empty
- markup string.
+ """Like :func:`escape` but treats ``None`` as the empty string.
+ Useful with optional values, as otherwise you get the string
+ ``'None'`` when the value is ``None``.
+
+ >>> escape(None)
+ Markup('None')
+ >>> escape_silent(None)
+ Markup('')
"""
if s is None:
return Markup()
@@ -38,8 +51,18 @@ def escape_silent(s):
def soft_unicode(s):
- """Make a string unicode if it isn't already. That way a markup
- string is not converted back to unicode.
+ """Convert an object to a string if it isn't already. This preserves
+ a :class:`Markup` string rather than converting it back to a basic
+ string, so it will still be marked as safe and won't be escaped
+ again.
+
+ >>> value = escape('<User 1>')
+ >>> value
+ Markup('&lt;User 1&gt;')
+ >>> escape(str(value))
+ Markup('&amp;lt;User 1&amp;gt;')
+ >>> escape(soft_unicode(value))
+ Markup('&lt;User 1&gt;')
"""
if not isinstance(s, text_type):
s = text_type(s)
diff --git a/lib/spack/external/ordereddict_backport.py b/lib/spack/external/ordereddict_backport.py
deleted file mode 100644
index 3c7f012e9e..0000000000
--- a/lib/spack/external/ordereddict_backport.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-"""This file dispatches to the correct implementation of OrderedDict."""
-
-# TODO: this file, along with py26/ordereddict.py, can be removed when
-# TODO: support for python 2.6 will be dropped
-
-# Removing this import will make python 2.6
-# fail on import of ordereddict
-from __future__ import absolute_import
-
-import sys
-
-if sys.version_info[:2] == (2, 6):
- import ordereddict
- OrderedDict = ordereddict.OrderedDict
-else:
- import collections
- OrderedDict = collections.OrderedDict
diff --git a/lib/spack/external/py2/functools32/LICENSE b/lib/spack/external/py2/functools32/LICENSE
new file mode 100644
index 0000000000..43388e7e13
--- /dev/null
+++ b/lib/spack/external/py2/functools32/LICENSE
@@ -0,0 +1,289 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC. Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team. In October of the same
+year, the PythonLabs team moved to Digital Creations (now Zope
+Corporation, see http://www.zope.com). In 2001, the Python Software
+Foundation (PSF, see http://www.python.org/psf/) was formed, a
+non-profit organization created specifically to own Python-related
+Intellectual Property. Zope Corporation is a sponsoring member of
+the PSF.
+
+All Python releases are Open Source (see http://www.opensource.org for
+the Open Source Definition). Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+ Release Derived Year Owner GPL-
+ from compatible? (1)
+
+ 0.9.0 thru 1.2 1991-1995 CWI yes
+ 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
+ 1.6 1.5.2 2000 CNRI no
+ 2.0 1.6 2000 BeOpen.com no
+ 1.6.1 1.6 2001 CNRI yes (2)
+ 2.1 2.0+1.6.1 2001 PSF no
+ 2.0.1 2.0+1.6.1 2001 PSF yes
+ 2.1.1 2.1+2.0.1 2001 PSF yes
+ 2.2 2.1.1 2001 PSF yes
+ 2.1.2 2.1.1 2002 PSF yes
+ 2.1.3 2.1.2 2002 PSF yes
+ 2.2.1 2.2 2002 PSF yes
+ 2.2.2 2.2.1 2002 PSF yes
+ 2.2.3 2.2.2 2003 PSF yes
+ 2.3 2.2.2 2002-2003 PSF yes
+ 2.3.1 2.3 2002-2003 PSF yes
+ 2.3.2 2.3.1 2002-2003 PSF yes
+ 2.3.3 2.3.2 2002-2003 PSF yes
+ 2.3.4 2.3.3 2004 PSF yes
+ 2.3.5 2.3.4 2005 PSF yes
+ 2.4 2.3 2004 PSF yes
+ 2.4.1 2.4 2005 PSF yes
+ 2.4.2 2.4.1 2005 PSF yes
+ 2.4.3 2.4.2 2006 PSF yes
+ 2.4.4 2.4.3 2006 PSF yes
+ 2.5 2.4 2006 PSF yes
+ 2.5.1 2.5 2007 PSF yes
+ 2.5.2 2.5.1 2008 PSF yes
+ 2.5.3 2.5.2 2008 PSF yes
+ 2.6 2.5 2008 PSF yes
+ 2.6.1 2.6 2008 PSF yes
+ 2.6.2 2.6.1 2009 PSF yes
+ 2.6.3 2.6.2 2009 PSF yes
+ 2.6.4 2.6.3 2009 PSF yes
+ 2.6.5 2.6.4 2010 PSF yes
+ 3.0 2.6 2008 PSF yes
+ 3.0.1 3.0 2009 PSF yes
+ 3.1 3.0.1 2009 PSF yes
+ 3.1.1 3.1 2009 PSF yes
+ 3.1.2 3.1.1 2010 PSF yes
+ 3.1.3 3.1.2 2010 PSF yes
+ 3.1.4 3.1.3 2011 PSF yes
+ 3.2 3.1 2011 PSF yes
+ 3.2.1 3.2 2011 PSF yes
+ 3.2.2 3.2.1 2011 PSF yes
+ 3.2.3 3.2.2 2012 PSF yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+ the GPL. All Python licenses, unlike the GPL, let you distribute
+ a modified version without making your changes open source. The
+ GPL-compatible licenses make it possible to combine Python with
+ other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+ because its license has a choice of law clause. According to
+ CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+ is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python
+alone or in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions. Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee. This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party. As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee. Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement. This Agreement together with
+Python 1.6.1 may be located on the Internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013. This
+Agreement may also be obtained from a proxy server on the Internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee. This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+ ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands. All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/lib/spack/external/py2/functools32/__init__.py b/lib/spack/external/py2/functools32/__init__.py
new file mode 100644
index 0000000000..837f7fb651
--- /dev/null
+++ b/lib/spack/external/py2/functools32/__init__.py
@@ -0,0 +1 @@
+from .functools32 import *
diff --git a/lib/spack/external/py2/functools32/_dummy_thread32.py b/lib/spack/external/py2/functools32/_dummy_thread32.py
new file mode 100644
index 0000000000..8503b0e3dd
--- /dev/null
+++ b/lib/spack/external/py2/functools32/_dummy_thread32.py
@@ -0,0 +1,158 @@
+"""Drop-in replacement for the thread module.
+
+Meant to be used as a brain-dead substitute so that threaded code does
+not need to be rewritten for when the thread module is not present.
+
+Suggested usage is::
+
+ try:
+ try:
+ import _thread # Python >= 3
+ except:
+ import thread as _thread # Python < 3
+ except ImportError:
+ import _dummy_thread as _thread
+
+"""
+# Exports only things specified by thread documentation;
+# skipping obsolete synonyms allocate(), start_new(), exit_thread().
+__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
+ 'interrupt_main', 'LockType']
+
+# A dummy value
+TIMEOUT_MAX = 2**31
+
+# NOTE: this module can be imported early in the extension building process,
+# and so top level imports of other modules should be avoided. Instead, all
+# imports are done when needed on a function-by-function basis. Since threads
+# are disabled, the import lock should not be an issue anyway (??).
+
+class error(Exception):
+ """Dummy implementation of _thread.error."""
+
+ def __init__(self, *args):
+ self.args = args
+
+def start_new_thread(function, args, kwargs={}):
+ """Dummy implementation of _thread.start_new_thread().
+
+ Compatibility is maintained by making sure that ``args`` is a
+ tuple and ``kwargs`` is a dictionary. If an exception is raised
+ and it is SystemExit (which can be done by _thread.exit()) it is
+ caught and nothing is done; all other exceptions are printed out
+ by using traceback.print_exc().
+
+ If the executed function calls interrupt_main the KeyboardInterrupt will be
+ raised when the function returns.
+
+ """
+ if type(args) != type(tuple()):
+ raise TypeError("2nd arg must be a tuple")
+ if type(kwargs) != type(dict()):
+ raise TypeError("3rd arg must be a dict")
+ global _main
+ _main = False
+ try:
+ function(*args, **kwargs)
+ except SystemExit:
+ pass
+ except:
+ import traceback
+ traceback.print_exc()
+ _main = True
+ global _interrupt
+ if _interrupt:
+ _interrupt = False
+ raise KeyboardInterrupt
+
+def exit():
+ """Dummy implementation of _thread.exit()."""
+ raise SystemExit
+
+def get_ident():
+ """Dummy implementation of _thread.get_ident().
+
+ Since this module should only be used when _threadmodule is not
+ available, it is safe to assume that the current process is the
+ only thread. Thus a constant can be safely returned.
+ """
+ return -1
+
+def allocate_lock():
+ """Dummy implementation of _thread.allocate_lock()."""
+ return LockType()
+
+def stack_size(size=None):
+ """Dummy implementation of _thread.stack_size()."""
+ if size is not None:
+ raise error("setting thread stack size not supported")
+ return 0
+
+class LockType(object):
+ """Class implementing dummy implementation of _thread.LockType.
+
+ Compatibility is maintained by maintaining self.locked_status
+ which is a boolean that stores the state of the lock. Pickling of
+ the lock, though, should not be done since if the _thread module is
+ then used with an unpickled ``lock()`` from here problems could
+ occur from this class not having atomic methods.
+
+ """
+
+ def __init__(self):
+ self.locked_status = False
+
+ def acquire(self, waitflag=None, timeout=-1):
+ """Dummy implementation of acquire().
+
+ For blocking calls, self.locked_status is automatically set to
+ True and returned appropriately based on value of
+ ``waitflag``. If it is non-blocking, then the value is
+ actually checked and not set if it is already acquired. This
+ is all done so that threading.Condition's assert statements
+ aren't triggered and throw a little fit.
+
+ """
+ if waitflag is None or waitflag:
+ self.locked_status = True
+ return True
+ else:
+ if not self.locked_status:
+ self.locked_status = True
+ return True
+ else:
+ if timeout > 0:
+ import time
+ time.sleep(timeout)
+ return False
+
+ __enter__ = acquire
+
+ def __exit__(self, typ, val, tb):
+ self.release()
+
+ def release(self):
+ """Release the dummy lock."""
+ # XXX Perhaps shouldn't actually bother to test? Could lead
+ # to problems for complex, threaded code.
+ if not self.locked_status:
+ raise error
+ self.locked_status = False
+ return True
+
+ def locked(self):
+ return self.locked_status
+
+# Used to signal that interrupt_main was called in a "thread"
+_interrupt = False
+# True when not executing in a "thread"
+_main = True
+
+def interrupt_main():
+ """Set _interrupt flag to True to have start_new_thread raise
+ KeyboardInterrupt upon exiting."""
+ if _main:
+ raise KeyboardInterrupt
+ else:
+ global _interrupt
+ _interrupt = True
diff --git a/lib/spack/external/py2/functools32/functools32.py b/lib/spack/external/py2/functools32/functools32.py
new file mode 100644
index 0000000000..c44551fac0
--- /dev/null
+++ b/lib/spack/external/py2/functools32/functools32.py
@@ -0,0 +1,423 @@
+"""functools.py - Tools for working with functions and callable objects
+"""
+# Python module wrapper for _functools C module
+# to allow utilities written in Python to be added
+# to the functools module.
+# Written by Nick Coghlan <ncoghlan at gmail.com>
+# and Raymond Hettinger <python at rcn.com>
+# Copyright (C) 2006-2010 Python Software Foundation.
+# See C source code for _functools credits/copyright
+
+__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
+ 'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial']
+
+from _functools import partial, reduce
+from collections import MutableMapping, namedtuple
+from .reprlib32 import recursive_repr as _recursive_repr
+from weakref import proxy as _proxy
+import sys as _sys
+try:
+ from thread import allocate_lock as Lock
+except ImportError:
+ from ._dummy_thread32 import allocate_lock as Lock
+
+################################################################################
+### OrderedDict
+################################################################################
+
+class _Link(object):
+ __slots__ = 'prev', 'next', 'key', '__weakref__'
+
+class OrderedDict(dict):
+ 'Dictionary that remembers insertion order'
+ # An inherited dict maps keys to values.
+ # The inherited dict provides __getitem__, __len__, __contains__, and get.
+ # The remaining methods are order-aware.
+ # Big-O running times for all methods are the same as regular dictionaries.
+
+ # The internal self.__map dict maps keys to links in a doubly linked list.
+ # The circular doubly linked list starts and ends with a sentinel element.
+ # The sentinel element never gets deleted (this simplifies the algorithm).
+ # The sentinel is in self.__hardroot with a weakref proxy in self.__root.
+ # The prev links are weakref proxies (to prevent circular references).
+ # Individual links are kept alive by the hard reference in self.__map.
+ # Those hard references disappear when a key is deleted from an OrderedDict.
+
+ def __init__(self, *args, **kwds):
+ '''Initialize an ordered dictionary. The signature is the same as
+ regular dictionaries, but keyword arguments are not recommended because
+ their insertion order is arbitrary.
+
+ '''
+ if len(args) > 1:
+ raise TypeError('expected at most 1 arguments, got %d' % len(args))
+ try:
+ self.__root
+ except AttributeError:
+ self.__hardroot = _Link()
+ self.__root = root = _proxy(self.__hardroot)
+ root.prev = root.next = root
+ self.__map = {}
+ self.__update(*args, **kwds)
+
+ def __setitem__(self, key, value,
+ dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
+ 'od.__setitem__(i, y) <==> od[i]=y'
+ # Setting a new item creates a new link at the end of the linked list,
+ # and the inherited dictionary is updated with the new key/value pair.
+ if key not in self:
+ self.__map[key] = link = Link()
+ root = self.__root
+ last = root.prev
+ link.prev, link.next, link.key = last, root, key
+ last.next = link
+ root.prev = proxy(link)
+ dict_setitem(self, key, value)
+
+ def __delitem__(self, key, dict_delitem=dict.__delitem__):
+ 'od.__delitem__(y) <==> del od[y]'
+ # Deleting an existing item uses self.__map to find the link which gets
+ # removed by updating the links in the predecessor and successor nodes.
+ dict_delitem(self, key)
+ link = self.__map.pop(key)
+ link_prev = link.prev
+ link_next = link.next
+ link_prev.next = link_next
+ link_next.prev = link_prev
+
+ def __iter__(self):
+ 'od.__iter__() <==> iter(od)'
+ # Traverse the linked list in order.
+ root = self.__root
+ curr = root.next
+ while curr is not root:
+ yield curr.key
+ curr = curr.next
+
+ def __reversed__(self):
+ 'od.__reversed__() <==> reversed(od)'
+ # Traverse the linked list in reverse order.
+ root = self.__root
+ curr = root.prev
+ while curr is not root:
+ yield curr.key
+ curr = curr.prev
+
+ def clear(self):
+ 'od.clear() -> None. Remove all items from od.'
+ root = self.__root
+ root.prev = root.next = root
+ self.__map.clear()
+ dict.clear(self)
+
+ def popitem(self, last=True):
+ '''od.popitem() -> (k, v), return and remove a (key, value) pair.
+ Pairs are returned in LIFO order if last is true or FIFO order if false.
+
+ '''
+ if not self:
+ raise KeyError('dictionary is empty')
+ root = self.__root
+ if last:
+ link = root.prev
+ link_prev = link.prev
+ link_prev.next = root
+ root.prev = link_prev
+ else:
+ link = root.next
+ link_next = link.next
+ root.next = link_next
+ link_next.prev = root
+ key = link.key
+ del self.__map[key]
+ value = dict.pop(self, key)
+ return key, value
+
+ def move_to_end(self, key, last=True):
+ '''Move an existing element to the end (or beginning if last==False).
+
+ Raises KeyError if the element does not exist.
+ When last=True, acts like a fast version of self[key]=self.pop(key).
+
+ '''
+ link = self.__map[key]
+ link_prev = link.prev
+ link_next = link.next
+ link_prev.next = link_next
+ link_next.prev = link_prev
+ root = self.__root
+ if last:
+ last = root.prev
+ link.prev = last
+ link.next = root
+ last.next = root.prev = link
+ else:
+ first = root.next
+ link.prev = root
+ link.next = first
+ root.next = first.prev = link
+
+ def __sizeof__(self):
+ sizeof = _sys.getsizeof
+ n = len(self) + 1 # number of links including root
+ size = sizeof(self.__dict__) # instance dictionary
+ size += sizeof(self.__map) * 2 # internal dict and inherited dict
+ size += sizeof(self.__hardroot) * n # link objects
+ size += sizeof(self.__root) * n # proxy objects
+ return size
+
+ update = __update = MutableMapping.update
+ keys = MutableMapping.keys
+ values = MutableMapping.values
+ items = MutableMapping.items
+ __ne__ = MutableMapping.__ne__
+
+ __marker = object()
+
+ def pop(self, key, default=__marker):
+ '''od.pop(k[,d]) -> v, remove specified key and return the corresponding
+ value. If key is not found, d is returned if given, otherwise KeyError
+ is raised.
+
+ '''
+ if key in self:
+ result = self[key]
+ del self[key]
+ return result
+ if default is self.__marker:
+ raise KeyError(key)
+ return default
+
+ def setdefault(self, key, default=None):
+ 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
+ if key in self:
+ return self[key]
+ self[key] = default
+ return default
+
+ @_recursive_repr()
+ def __repr__(self):
+ 'od.__repr__() <==> repr(od)'
+ if not self:
+ return '%s()' % (self.__class__.__name__,)
+ return '%s(%r)' % (self.__class__.__name__, list(self.items()))
+
+ def __reduce__(self):
+ 'Return state information for pickling'
+ items = [[k, self[k]] for k in self]
+ inst_dict = vars(self).copy()
+ for k in vars(OrderedDict()):
+ inst_dict.pop(k, None)
+ if inst_dict:
+ return (self.__class__, (items,), inst_dict)
+ return self.__class__, (items,)
+
+ def copy(self):
+ 'od.copy() -> a shallow copy of od'
+ return self.__class__(self)
+
+ @classmethod
+ def fromkeys(cls, iterable, value=None):
+ '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
+ If not specified, the value defaults to None.
+
+ '''
+ self = cls()
+ for key in iterable:
+ self[key] = value
+ return self
+
+ def __eq__(self, other):
+ '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
+ while comparison to a regular mapping is order-insensitive.
+
+ '''
+ if isinstance(other, OrderedDict):
+ return len(self)==len(other) and \
+ all(p==q for p, q in zip(self.items(), other.items()))
+ return dict.__eq__(self, other)
+
+# update_wrapper() and wraps() are tools to help write
+# wrapper functions that can handle naive introspection
+
+WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__doc__')
+WRAPPER_UPDATES = ('__dict__',)
+def update_wrapper(wrapper,
+ wrapped,
+ assigned = WRAPPER_ASSIGNMENTS,
+ updated = WRAPPER_UPDATES):
+ """Update a wrapper function to look like the wrapped function
+
+ wrapper is the function to be updated
+ wrapped is the original function
+ assigned is a tuple naming the attributes assigned directly
+ from the wrapped function to the wrapper function (defaults to
+ functools.WRAPPER_ASSIGNMENTS)
+ updated is a tuple naming the attributes of the wrapper that
+ are updated with the corresponding attribute from the wrapped
+ function (defaults to functools.WRAPPER_UPDATES)
+ """
+ wrapper.__wrapped__ = wrapped
+ for attr in assigned:
+ try:
+ value = getattr(wrapped, attr)
+ except AttributeError:
+ pass
+ else:
+ setattr(wrapper, attr, value)
+ for attr in updated:
+ getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
+ # Return the wrapper so this can be used as a decorator via partial()
+ return wrapper
+
+def wraps(wrapped,
+ assigned = WRAPPER_ASSIGNMENTS,
+ updated = WRAPPER_UPDATES):
+ """Decorator factory to apply update_wrapper() to a wrapper function
+
+ Returns a decorator that invokes update_wrapper() with the decorated
+ function as the wrapper argument and the arguments to wraps() as the
+ remaining arguments. Default arguments are as for update_wrapper().
+ This is a convenience function to simplify applying partial() to
+ update_wrapper().
+ """
+ return partial(update_wrapper, wrapped=wrapped,
+ assigned=assigned, updated=updated)
+
+def total_ordering(cls):
+ """Class decorator that fills in missing ordering methods"""
+ convert = {
+ '__lt__': [('__gt__', lambda self, other: not (self < other or self == other)),
+ ('__le__', lambda self, other: self < other or self == other),
+ ('__ge__', lambda self, other: not self < other)],
+ '__le__': [('__ge__', lambda self, other: not self <= other or self == other),
+ ('__lt__', lambda self, other: self <= other and not self == other),
+ ('__gt__', lambda self, other: not self <= other)],
+ '__gt__': [('__lt__', lambda self, other: not (self > other or self == other)),
+ ('__ge__', lambda self, other: self > other or self == other),
+ ('__le__', lambda self, other: not self > other)],
+ '__ge__': [('__le__', lambda self, other: (not self >= other) or self == other),
+ ('__gt__', lambda self, other: self >= other and not self == other),
+ ('__lt__', lambda self, other: not self >= other)]
+ }
+ roots = set(dir(cls)) & set(convert)
+ if not roots:
+ raise ValueError('must define at least one ordering operation: < > <= >=')
+ root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__
+ for opname, opfunc in convert[root]:
+ if opname not in roots:
+ opfunc.__name__ = opname
+ opfunc.__doc__ = getattr(int, opname).__doc__
+ setattr(cls, opname, opfunc)
+ return cls
+
+def cmp_to_key(mycmp):
+ """Convert a cmp= function into a key= function"""
+ class K(object):
+ __slots__ = ['obj']
+ def __init__(self, obj):
+ self.obj = obj
+ def __lt__(self, other):
+ return mycmp(self.obj, other.obj) < 0
+ def __gt__(self, other):
+ return mycmp(self.obj, other.obj) > 0
+ def __eq__(self, other):
+ return mycmp(self.obj, other.obj) == 0
+ def __le__(self, other):
+ return mycmp(self.obj, other.obj) <= 0
+ def __ge__(self, other):
+ return mycmp(self.obj, other.obj) >= 0
+ def __ne__(self, other):
+ return mycmp(self.obj, other.obj) != 0
+ __hash__ = None
+ return K
+
+_CacheInfo = namedtuple("CacheInfo", "hits misses maxsize currsize")
+
+def lru_cache(maxsize=100):
+ """Least-recently-used cache decorator.
+
+ If *maxsize* is set to None, the LRU features are disabled and the cache
+ can grow without bound.
+
+ Arguments to the cached function must be hashable.
+
+ View the cache statistics named tuple (hits, misses, maxsize, currsize) with
+ f.cache_info(). Clear the cache and statistics with f.cache_clear().
+ Access the underlying function with f.__wrapped__.
+
+ See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
+
+ """
+ # Users should only access the lru_cache through its public API:
+ # cache_info, cache_clear, and f.__wrapped__
+ # The internals of the lru_cache are encapsulated for thread safety and
+ # to allow the implementation to change (including a possible C version).
+
+ def decorating_function(user_function,
+ tuple=tuple, sorted=sorted, len=len, KeyError=KeyError):
+
+ hits, misses = [0], [0]
+ kwd_mark = (object(),) # separates positional and keyword args
+ lock = Lock() # needed because OrderedDict isn't threadsafe
+
+ if maxsize is None:
+ cache = dict() # simple cache without ordering or size limit
+
+ @wraps(user_function)
+ def wrapper(*args, **kwds):
+ key = args
+ if kwds:
+ key += kwd_mark + tuple(sorted(kwds.items()))
+ try:
+ result = cache[key]
+ hits[0] += 1
+ return result
+ except KeyError:
+ pass
+ result = user_function(*args, **kwds)
+ cache[key] = result
+ misses[0] += 1
+ return result
+ else:
+ cache = OrderedDict() # ordered least recent to most recent
+ cache_popitem = cache.popitem
+ cache_renew = cache.move_to_end
+
+ @wraps(user_function)
+ def wrapper(*args, **kwds):
+ key = args
+ if kwds:
+ key += kwd_mark + tuple(sorted(kwds.items()))
+ with lock:
+ try:
+ result = cache[key]
+ cache_renew(key) # record recent use of this key
+ hits[0] += 1
+ return result
+ except KeyError:
+ pass
+ result = user_function(*args, **kwds)
+ with lock:
+ cache[key] = result # record recent use of this key
+ misses[0] += 1
+ if len(cache) > maxsize:
+ cache_popitem(0) # purge least recently used cache entry
+ return result
+
+ def cache_info():
+ """Report cache statistics"""
+ with lock:
+ return _CacheInfo(hits[0], misses[0], maxsize, len(cache))
+
+ def cache_clear():
+ """Clear the cache and cache statistics"""
+ with lock:
+ cache.clear()
+ hits[0] = misses[0] = 0
+
+ wrapper.cache_info = cache_info
+ wrapper.cache_clear = cache_clear
+ return wrapper
+
+ return decorating_function
diff --git a/lib/spack/external/py2/functools32/reprlib32.py b/lib/spack/external/py2/functools32/reprlib32.py
new file mode 100644
index 0000000000..af919758ca
--- /dev/null
+++ b/lib/spack/external/py2/functools32/reprlib32.py
@@ -0,0 +1,157 @@
+"""Redo the builtin repr() (representation) but with limits on most sizes."""
+
+__all__ = ["Repr", "repr", "recursive_repr"]
+
+import __builtin__ as builtins
+from itertools import islice
+try:
+ from thread import get_ident
+except ImportError:
+ from _dummy_thread32 import get_ident
+
+def recursive_repr(fillvalue='...'):
+ 'Decorator to make a repr function return fillvalue for a recursive call'
+
+ def decorating_function(user_function):
+ repr_running = set()
+
+ def wrapper(self):
+ key = id(self), get_ident()
+ if key in repr_running:
+ return fillvalue
+ repr_running.add(key)
+ try:
+ result = user_function(self)
+ finally:
+ repr_running.discard(key)
+ return result
+
+ # Can't use functools.wraps() here because of bootstrap issues
+ wrapper.__module__ = getattr(user_function, '__module__')
+ wrapper.__doc__ = getattr(user_function, '__doc__')
+ wrapper.__name__ = getattr(user_function, '__name__')
+ wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
+ return wrapper
+
+ return decorating_function
+
+class Repr:
+
+ def __init__(self):
+ self.maxlevel = 6
+ self.maxtuple = 6
+ self.maxlist = 6
+ self.maxarray = 5
+ self.maxdict = 4
+ self.maxset = 6
+ self.maxfrozenset = 6
+ self.maxdeque = 6
+ self.maxstring = 30
+ self.maxlong = 40
+ self.maxother = 30
+
+ def repr(self, x):
+ return self.repr1(x, self.maxlevel)
+
+ def repr1(self, x, level):
+ typename = type(x).__name__
+ if ' ' in typename:
+ parts = typename.split()
+ typename = '_'.join(parts)
+ if hasattr(self, 'repr_' + typename):
+ return getattr(self, 'repr_' + typename)(x, level)
+ else:
+ return self.repr_instance(x, level)
+
+ def _repr_iterable(self, x, level, left, right, maxiter, trail=''):
+ n = len(x)
+ if level <= 0 and n:
+ s = '...'
+ else:
+ newlevel = level - 1
+ repr1 = self.repr1
+ pieces = [repr1(elem, newlevel) for elem in islice(x, maxiter)]
+ if n > maxiter: pieces.append('...')
+ s = ', '.join(pieces)
+ if n == 1 and trail: right = trail + right
+ return '%s%s%s' % (left, s, right)
+
+ def repr_tuple(self, x, level):
+ return self._repr_iterable(x, level, '(', ')', self.maxtuple, ',')
+
+ def repr_list(self, x, level):
+ return self._repr_iterable(x, level, '[', ']', self.maxlist)
+
+ def repr_array(self, x, level):
+ header = "array('%s', [" % x.typecode
+ return self._repr_iterable(x, level, header, '])', self.maxarray)
+
+ def repr_set(self, x, level):
+ x = _possibly_sorted(x)
+ return self._repr_iterable(x, level, 'set([', '])', self.maxset)
+
+ def repr_frozenset(self, x, level):
+ x = _possibly_sorted(x)
+ return self._repr_iterable(x, level, 'frozenset([', '])',
+ self.maxfrozenset)
+
+ def repr_deque(self, x, level):
+ return self._repr_iterable(x, level, 'deque([', '])', self.maxdeque)
+
+ def repr_dict(self, x, level):
+ n = len(x)
+ if n == 0: return '{}'
+ if level <= 0: return '{...}'
+ newlevel = level - 1
+ repr1 = self.repr1
+ pieces = []
+ for key in islice(_possibly_sorted(x), self.maxdict):
+ keyrepr = repr1(key, newlevel)
+ valrepr = repr1(x[key], newlevel)
+ pieces.append('%s: %s' % (keyrepr, valrepr))
+ if n > self.maxdict: pieces.append('...')
+ s = ', '.join(pieces)
+ return '{%s}' % (s,)
+
+ def repr_str(self, x, level):
+ s = builtins.repr(x[:self.maxstring])
+ if len(s) > self.maxstring:
+ i = max(0, (self.maxstring-3)//2)
+ j = max(0, self.maxstring-3-i)
+ s = builtins.repr(x[:i] + x[len(x)-j:])
+ s = s[:i] + '...' + s[len(s)-j:]
+ return s
+
+ def repr_int(self, x, level):
+ s = builtins.repr(x) # XXX Hope this isn't too slow...
+ if len(s) > self.maxlong:
+ i = max(0, (self.maxlong-3)//2)
+ j = max(0, self.maxlong-3-i)
+ s = s[:i] + '...' + s[len(s)-j:]
+ return s
+
+ def repr_instance(self, x, level):
+ try:
+ s = builtins.repr(x)
+ # Bugs in x.__repr__() can cause arbitrary
+ # exceptions -- then make up something
+ except Exception:
+ return '<%s instance at %x>' % (x.__class__.__name__, id(x))
+ if len(s) > self.maxother:
+ i = max(0, (self.maxother-3)//2)
+ j = max(0, self.maxother-3-i)
+ s = s[:i] + '...' + s[len(s)-j:]
+ return s
+
+
+def _possibly_sorted(x):
+ # Since not all sequences of items can be sorted and comparison
+ # functions may raise arbitrary exceptions, return an unsorted
+ # sequence in that case.
+ try:
+ return sorted(x)
+ except Exception:
+ return list(x)
+
+aRepr = Repr()
+repr = aRepr.repr
diff --git a/lib/spack/external/py26/ordereddict.py b/lib/spack/external/py26/ordereddict.py
deleted file mode 100644
index 7242b5060d..0000000000
--- a/lib/spack/external/py26/ordereddict.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# Copyright (c) 2009 Raymond Hettinger
-#
-# Permission is hereby granted, free of charge, to any person
-# obtaining a copy of this software and associated documentation files
-# (the "Software"), to deal in the Software without restriction,
-# including without limitation the rights to use, copy, modify, merge,
-# publish, distribute, sublicense, and/or sell copies of the Software,
-# and to permit persons to whom the Software is furnished to do so,
-# subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-# OTHER DEALINGS IN THE SOFTWARE.
-
-from UserDict import DictMixin
-
-class OrderedDict(dict, DictMixin):
-
- def __init__(self, *args, **kwds):
- if len(args) > 1:
- raise TypeError('expected at most 1 arguments, got %d' % len(args))
- try:
- self.__end
- except AttributeError:
- self.clear()
- self.update(*args, **kwds)
-
- def clear(self):
- self.__end = end = []
- end += [None, end, end] # sentinel node for doubly linked list
- self.__map = {} # key --> [key, prev, next]
- dict.clear(self)
-
- def __setitem__(self, key, value):
- if key not in self:
- end = self.__end
- curr = end[1]
- curr[2] = end[1] = self.__map[key] = [key, curr, end]
- dict.__setitem__(self, key, value)
-
- def __delitem__(self, key):
- dict.__delitem__(self, key)
- key, prev, next = self.__map.pop(key)
- prev[2] = next
- next[1] = prev
-
- def __iter__(self):
- end = self.__end
- curr = end[2]
- while curr is not end:
- yield curr[0]
- curr = curr[2]
-
- def __reversed__(self):
- end = self.__end
- curr = end[1]
- while curr is not end:
- yield curr[0]
- curr = curr[1]
-
- def popitem(self, last=True):
- if not self:
- raise KeyError('dictionary is empty')
- if last:
- key = reversed(self).next()
- else:
- key = iter(self).next()
- value = self.pop(key)
- return key, value
-
- def __reduce__(self):
- items = [[k, self[k]] for k in self]
- tmp = self.__map, self.__end
- del self.__map, self.__end
- inst_dict = vars(self).copy()
- self.__map, self.__end = tmp
- if inst_dict:
- return (self.__class__, (items,), inst_dict)
- return self.__class__, (items,)
-
- def keys(self):
- return list(self)
-
- setdefault = DictMixin.setdefault
- update = DictMixin.update
- pop = DictMixin.pop
- values = DictMixin.values
- items = DictMixin.items
- iterkeys = DictMixin.iterkeys
- itervalues = DictMixin.itervalues
- iteritems = DictMixin.iteritems
-
- def __repr__(self):
- if not self:
- return '%s()' % (self.__class__.__name__,)
- return '%s(%r)' % (self.__class__.__name__, self.items())
-
- def copy(self):
- return self.__class__(self)
-
- @classmethod
- def fromkeys(cls, iterable, value=None):
- d = cls()
- for key in iterable:
- d[key] = value
- return d
-
- def __eq__(self, other):
- if isinstance(other, OrderedDict):
- if len(self) != len(other):
- return False
- for p, q in zip(self.items(), other.items()):
- if p != q:
- return False
- return True
- return dict.__eq__(self, other)
-
- def __ne__(self, other):
- return not self == other
diff --git a/lib/spack/external/pyrsistent/LICENSE b/lib/spack/external/pyrsistent/LICENSE
new file mode 100644
index 0000000000..6609e4c05a
--- /dev/null
+++ b/lib/spack/external/pyrsistent/LICENSE
@@ -0,0 +1,22 @@
+Copyright (c) 2019 Tobias Gustafsson
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file
diff --git a/lib/spack/external/pyrsistent/__init__.py b/lib/spack/external/pyrsistent/__init__.py
new file mode 100644
index 0000000000..6e610c1ddb
--- /dev/null
+++ b/lib/spack/external/pyrsistent/__init__.py
@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+
+from pyrsistent._pmap import pmap
+
+
+__all__ = ('pmap',)
diff --git a/lib/spack/external/pyrsistent/_compat.py b/lib/spack/external/pyrsistent/_compat.py
new file mode 100644
index 0000000000..e728586afe
--- /dev/null
+++ b/lib/spack/external/pyrsistent/_compat.py
@@ -0,0 +1,31 @@
+from six import string_types
+
+
+# enum compat
+try:
+ from enum import Enum
+except:
+ class Enum(object): pass
+ # no objects will be instances of this class
+
+# collections compat
+try:
+ from collections.abc import (
+ Container,
+ Hashable,
+ Iterable,
+ Mapping,
+ Sequence,
+ Set,
+ Sized,
+ )
+except ImportError:
+ from collections import (
+ Container,
+ Hashable,
+ Iterable,
+ Mapping,
+ Sequence,
+ Set,
+ Sized,
+ )
diff --git a/lib/spack/external/pyrsistent/_pmap.py b/lib/spack/external/pyrsistent/_pmap.py
new file mode 100644
index 0000000000..e8a0ec53f8
--- /dev/null
+++ b/lib/spack/external/pyrsistent/_pmap.py
@@ -0,0 +1,460 @@
+from ._compat import Mapping, Hashable
+from itertools import chain
+import six
+from pyrsistent._pvector import pvector
+from pyrsistent._transformations import transform
+
+
+class PMap(object):
+ """
+ Persistent map/dict. Tries to follow the same naming conventions as the built in dict where feasible.
+
+ Do not instantiate directly, instead use the factory functions :py:func:`m` or :py:func:`pmap` to
+ create an instance.
+
+    Was originally written as a very close copy of the Clojure equivalent but was later rewritten to more
+    closely resemble the python dict. This means that a sparse vector (a PVector) of buckets is used. The keys are
+    hashed and the elements inserted at position hash % len(bucket_vector). Whenever the map size exceeds 2/3 of
+    the containing vector's size the map is reallocated to a vector of double the size. This is done to avoid
+ excessive hash collisions.
+
+ This structure corresponds most closely to the built in dict type and is intended as a replacement. Where the
+ semantics are the same (more or less) the same function names have been used but for some cases it is not possible,
+ for example assignments and deletion of values.
+
+ PMap implements the Mapping protocol and is Hashable. It also supports dot-notation for
+ element access.
+
+ Random access and insert is log32(n) where n is the size of the map.
+
+ The following are examples of some common operations on persistent maps
+
+ >>> m1 = m(a=1, b=3)
+ >>> m2 = m1.set('c', 3)
+ >>> m3 = m2.remove('a')
+ >>> m1
+ pmap({'b': 3, 'a': 1})
+ >>> m2
+ pmap({'c': 3, 'b': 3, 'a': 1})
+ >>> m3
+ pmap({'c': 3, 'b': 3})
+ >>> m3['c']
+ 3
+ >>> m3.c
+ 3
+ """
+ __slots__ = ('_size', '_buckets', '__weakref__', '_cached_hash')
+
+ def __new__(cls, size, buckets):
+ self = super(PMap, cls).__new__(cls)
+ self._size = size
+ self._buckets = buckets
+ return self
+
+ @staticmethod
+ def _get_bucket(buckets, key):
+ index = hash(key) % len(buckets)
+ bucket = buckets[index]
+ return index, bucket
+
+ @staticmethod
+ def _getitem(buckets, key):
+ _, bucket = PMap._get_bucket(buckets, key)
+ if bucket:
+ for k, v in bucket:
+ if k == key:
+ return v
+
+ raise KeyError(key)
+
+ def __getitem__(self, key):
+ return PMap._getitem(self._buckets, key)
+
+ @staticmethod
+ def _contains(buckets, key):
+ _, bucket = PMap._get_bucket(buckets, key)
+ if bucket:
+ for k, _ in bucket:
+ if k == key:
+ return True
+
+ return False
+
+ return False
+
+ def __contains__(self, key):
+ return self._contains(self._buckets, key)
+
+ get = Mapping.get
+
+ def __iter__(self):
+ return self.iterkeys()
+
+ def __getattr__(self, key):
+ try:
+ return self[key]
+ except KeyError:
+ raise AttributeError(
+ "{0} has no attribute '{1}'".format(type(self).__name__, key)
+ )
+
+ def iterkeys(self):
+ for k, _ in self.iteritems():
+ yield k
+
+ # These are more efficient implementations compared to the original
+ # methods that are based on the keys iterator and then calls the
+ # accessor functions to access the value for the corresponding key
+ def itervalues(self):
+ for _, v in self.iteritems():
+ yield v
+
+ def iteritems(self):
+ for bucket in self._buckets:
+ if bucket:
+ for k, v in bucket:
+ yield k, v
+
+ def values(self):
+ return pvector(self.itervalues())
+
+ def keys(self):
+ return pvector(self.iterkeys())
+
+ def items(self):
+ return pvector(self.iteritems())
+
+ def __len__(self):
+ return self._size
+
+ def __repr__(self):
+ return 'pmap({0})'.format(str(dict(self)))
+
+ def __eq__(self, other):
+ if self is other:
+ return True
+ if not isinstance(other, Mapping):
+ return NotImplemented
+ if len(self) != len(other):
+ return False
+ if isinstance(other, PMap):
+ if (hasattr(self, '_cached_hash') and hasattr(other, '_cached_hash')
+ and self._cached_hash != other._cached_hash):
+ return False
+ if self._buckets == other._buckets:
+ return True
+ return dict(self.iteritems()) == dict(other.iteritems())
+ elif isinstance(other, dict):
+ return dict(self.iteritems()) == other
+ return dict(self.iteritems()) == dict(six.iteritems(other))
+
+ __ne__ = Mapping.__ne__
+
+ def __lt__(self, other):
+ raise TypeError('PMaps are not orderable')
+
+ __le__ = __lt__
+ __gt__ = __lt__
+ __ge__ = __lt__
+
+ def __str__(self):
+ return self.__repr__()
+
+ def __hash__(self):
+ if not hasattr(self, '_cached_hash'):
+ self._cached_hash = hash(frozenset(self.iteritems()))
+ return self._cached_hash
+
+ def set(self, key, val):
+ """
+ Return a new PMap with key and val inserted.
+
+ >>> m1 = m(a=1, b=2)
+ >>> m2 = m1.set('a', 3)
+ >>> m3 = m1.set('c' ,4)
+ >>> m1
+ pmap({'b': 2, 'a': 1})
+ >>> m2
+ pmap({'b': 2, 'a': 3})
+ >>> m3
+ pmap({'c': 4, 'b': 2, 'a': 1})
+ """
+ return self.evolver().set(key, val).persistent()
+
+ def remove(self, key):
+ """
+ Return a new PMap without the element specified by key. Raises KeyError if the element
+ is not present.
+
+ >>> m1 = m(a=1, b=2)
+ >>> m1.remove('a')
+ pmap({'b': 2})
+ """
+ return self.evolver().remove(key).persistent()
+
+ def discard(self, key):
+ """
+ Return a new PMap without the element specified by key. Returns reference to itself
+ if element is not present.
+
+ >>> m1 = m(a=1, b=2)
+ >>> m1.discard('a')
+ pmap({'b': 2})
+ >>> m1 is m1.discard('c')
+ True
+ """
+ try:
+ return self.remove(key)
+ except KeyError:
+ return self
+
+ def update(self, *maps):
+ """
+ Return a new PMap with the items in Mappings inserted. If the same key is present in multiple
+ maps the rightmost (last) value is inserted.
+
+ >>> m1 = m(a=1, b=2)
+ >>> m1.update(m(a=2, c=3), {'a': 17, 'd': 35})
+ pmap({'c': 3, 'b': 2, 'a': 17, 'd': 35})
+ """
+ return self.update_with(lambda l, r: r, *maps)
+
+ def update_with(self, update_fn, *maps):
+ """
+ Return a new PMap with the items in Mappings maps inserted. If the same key is present in multiple
+ maps the values will be merged using merge_fn going from left to right.
+
+ >>> from operator import add
+ >>> m1 = m(a=1, b=2)
+ >>> m1.update_with(add, m(a=2))
+ pmap({'b': 2, 'a': 3})
+
+ The reverse behaviour of the regular merge. Keep the leftmost element instead of the rightmost.
+
+ >>> m1 = m(a=1)
+ >>> m1.update_with(lambda l, r: l, m(a=2), {'a':3})
+ pmap({'a': 1})
+ """
+ evolver = self.evolver()
+ for map in maps:
+ for key, value in map.items():
+ evolver.set(key, update_fn(evolver[key], value) if key in evolver else value)
+
+ return evolver.persistent()
+
+ def __add__(self, other):
+ return self.update(other)
+
+ def __reduce__(self):
+ # Pickling support
+ return pmap, (dict(self),)
+
+ def transform(self, *transformations):
+ """
+ Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
+ consists of two parts. One match expression that specifies which elements to transform
+ and one transformation function that performs the actual transformation.
+
+ >>> from pyrsistent import freeze, ny
+ >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
+ ... {'author': 'Steve', 'content': 'A slightly longer article'}],
+ ... 'weather': {'temperature': '11C', 'wind': '5m/s'}})
+ >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
+ >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
+ >>> very_short_news.articles[0].content
+ 'A short article'
+ >>> very_short_news.articles[1].content
+ 'A slightly long...'
+
+ When nothing has been transformed the original data structure is kept
+
+ >>> short_news is news_paper
+ True
+ >>> very_short_news is news_paper
+ False
+ >>> very_short_news.articles[0] is news_paper.articles[0]
+ True
+ """
+ return transform(self, transformations)
+
+ def copy(self):
+ return self
+
+ class _Evolver(object):
+ __slots__ = ('_buckets_evolver', '_size', '_original_pmap')
+
+ def __init__(self, original_pmap):
+ self._original_pmap = original_pmap
+ self._buckets_evolver = original_pmap._buckets.evolver()
+ self._size = original_pmap._size
+
+ def __getitem__(self, key):
+ return PMap._getitem(self._buckets_evolver, key)
+
+ def __setitem__(self, key, val):
+ self.set(key, val)
+
+ def set(self, key, val):
+ if len(self._buckets_evolver) < 0.67 * self._size:
+ self._reallocate(2 * len(self._buckets_evolver))
+
+ kv = (key, val)
+ index, bucket = PMap._get_bucket(self._buckets_evolver, key)
+ if bucket:
+ for k, v in bucket:
+ if k == key:
+ if v is not val:
+ new_bucket = [(k2, v2) if k2 != k else (k2, val) for k2, v2 in bucket]
+ self._buckets_evolver[index] = new_bucket
+
+ return self
+
+ new_bucket = [kv]
+ new_bucket.extend(bucket)
+ self._buckets_evolver[index] = new_bucket
+ self._size += 1
+ else:
+ self._buckets_evolver[index] = [kv]
+ self._size += 1
+
+ return self
+
+ def _reallocate(self, new_size):
+ new_list = new_size * [None]
+ buckets = self._buckets_evolver.persistent()
+ for k, v in chain.from_iterable(x for x in buckets if x):
+ index = hash(k) % new_size
+ if new_list[index]:
+ new_list[index].append((k, v))
+ else:
+ new_list[index] = [(k, v)]
+
+ # A reallocation should always result in a dirty buckets evolver to avoid
+ # possible loss of elements when doing the reallocation.
+ self._buckets_evolver = pvector().evolver()
+ self._buckets_evolver.extend(new_list)
+
+ def is_dirty(self):
+ return self._buckets_evolver.is_dirty()
+
+ def persistent(self):
+ if self.is_dirty():
+ self._original_pmap = PMap(self._size, self._buckets_evolver.persistent())
+
+ return self._original_pmap
+
+ def __len__(self):
+ return self._size
+
+ def __contains__(self, key):
+ return PMap._contains(self._buckets_evolver, key)
+
+ def __delitem__(self, key):
+ self.remove(key)
+
+ def remove(self, key):
+ index, bucket = PMap._get_bucket(self._buckets_evolver, key)
+
+ if bucket:
+ new_bucket = [(k, v) for (k, v) in bucket if k != key]
+ if len(bucket) > len(new_bucket):
+ self._buckets_evolver[index] = new_bucket if new_bucket else None
+ self._size -= 1
+ return self
+
+ raise KeyError('{0}'.format(key))
+
+ def evolver(self):
+ """
+ Create a new evolver for this pmap. For a discussion on evolvers in general see the
+ documentation for the pvector evolver.
+
+ Create the evolver and perform various mutating updates to it:
+
+ >>> m1 = m(a=1, b=2)
+ >>> e = m1.evolver()
+ >>> e['c'] = 3
+ >>> len(e)
+ 3
+ >>> del e['a']
+
+ The underlying pmap remains the same:
+
+ >>> m1
+ pmap({'b': 2, 'a': 1})
+
+ The changes are kept in the evolver. An updated pmap can be created using the
+ persistent() function on the evolver.
+
+ >>> m2 = e.persistent()
+ >>> m2
+ pmap({'c': 3, 'b': 2})
+
+ The new pmap will share data with the original pmap in the same way that would have
+ been done if only using operations on the pmap.
+ """
+ return self._Evolver(self)
+
+Mapping.register(PMap)
+Hashable.register(PMap)
+
+
+def _turbo_mapping(initial, pre_size):
+ if pre_size:
+ size = pre_size
+ else:
+ try:
+ size = 2 * len(initial) or 8
+ except Exception:
+ # Guess we can't figure out the length. Give up on length hinting,
+ # we can always reallocate later.
+ size = 8
+
+ buckets = size * [None]
+
+ if not isinstance(initial, Mapping):
+ # Make a dictionary of the initial data if it isn't already,
+ # that will save us some job further down since we can assume no
+ # key collisions
+ initial = dict(initial)
+
+ for k, v in six.iteritems(initial):
+ h = hash(k)
+ index = h % size
+ bucket = buckets[index]
+
+ if bucket:
+ bucket.append((k, v))
+ else:
+ buckets[index] = [(k, v)]
+
+ return PMap(len(initial), pvector().extend(buckets))
+
+
+_EMPTY_PMAP = _turbo_mapping({}, 0)
+
+
+def pmap(initial={}, pre_size=0):
+ """
+ Create new persistent map, inserts all elements in initial into the newly created map.
+ The optional argument pre_size may be used to specify an initial size of the underlying bucket vector. This
+ may have a positive performance impact in the cases where you know beforehand that a large number of elements
+ will be inserted into the map eventually since it will reduce the number of reallocations required.
+
+ >>> pmap({'a': 13, 'b': 14})
+ pmap({'b': 14, 'a': 13})
+ """
+ if not initial:
+ return _EMPTY_PMAP
+
+ return _turbo_mapping(initial, pre_size)
+
+
+def m(**kwargs):
+ """
+    Creates a new persistent map. Inserts all key value arguments into the newly created map.
+
+ >>> m(a=13, b=14)
+ pmap({'b': 14, 'a': 13})
+ """
+ return pmap(kwargs)
diff --git a/lib/spack/external/pyrsistent/_pvector.py b/lib/spack/external/pyrsistent/_pvector.py
new file mode 100644
index 0000000000..82232782b7
--- /dev/null
+++ b/lib/spack/external/pyrsistent/_pvector.py
@@ -0,0 +1,713 @@
+from abc import abstractmethod, ABCMeta
+from ._compat import Sequence, Hashable
+from numbers import Integral
+import operator
+import six
+from pyrsistent._transformations import transform
+
+
+def _bitcount(val):
+ return bin(val).count("1")
+
+BRANCH_FACTOR = 32
+BIT_MASK = BRANCH_FACTOR - 1
+SHIFT = _bitcount(BIT_MASK)
+
+
+def compare_pvector(v, other, operator):
+ return operator(v.tolist(), other.tolist() if isinstance(other, PVector) else other)
+
+
+def _index_or_slice(index, stop):
+ if stop is None:
+ return index
+
+ return slice(index, stop)
+
+
+class PythonPVector(object):
+ """
+ Support structure for PVector that implements structural sharing for vectors using a trie.
+ """
+ __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '__weakref__')
+
+ def __new__(cls, count, shift, root, tail):
+ self = super(PythonPVector, cls).__new__(cls)
+ self._count = count
+ self._shift = shift
+ self._root = root
+ self._tail = tail
+
+ # Derived attribute stored for performance
+ self._tail_offset = self._count - len(self._tail)
+ return self
+
+ def __len__(self):
+ return self._count
+
+ def __getitem__(self, index):
+ if isinstance(index, slice):
+ # There are more conditions than the below where it would be OK to
+ # return ourselves, implement those...
+ if index.start is None and index.stop is None and index.step is None:
+ return self
+
+ # This is a bit nasty realizing the whole structure as a list before
+ # slicing it but it is the fastest way I've found to date, and it's easy :-)
+ return _EMPTY_PVECTOR.extend(self.tolist()[index])
+
+ if index < 0:
+ index += self._count
+
+ return PythonPVector._node_for(self, index)[index & BIT_MASK]
+
+ def __add__(self, other):
+ return self.extend(other)
+
+ def __repr__(self):
+ return 'pvector({0})'.format(str(self.tolist()))
+
+ def __str__(self):
+ return self.__repr__()
+
+ def __iter__(self):
+        # This is kind of lazy and will produce some memory overhead but it is the fastest method
+ # by far of those tried since it uses the speed of the built in python list directly.
+ return iter(self.tolist())
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __eq__(self, other):
+ return self is other or (hasattr(other, '__len__') and self._count == len(other)) and compare_pvector(self, other, operator.eq)
+
+ def __gt__(self, other):
+ return compare_pvector(self, other, operator.gt)
+
+ def __lt__(self, other):
+ return compare_pvector(self, other, operator.lt)
+
+ def __ge__(self, other):
+ return compare_pvector(self, other, operator.ge)
+
+ def __le__(self, other):
+ return compare_pvector(self, other, operator.le)
+
+ def __mul__(self, times):
+ if times <= 0 or self is _EMPTY_PVECTOR:
+ return _EMPTY_PVECTOR
+
+ if times == 1:
+ return self
+
+ return _EMPTY_PVECTOR.extend(times * self.tolist())
+
+ __rmul__ = __mul__
+
+ def _fill_list(self, node, shift, the_list):
+ if shift:
+ shift -= SHIFT
+ for n in node:
+ self._fill_list(n, shift, the_list)
+ else:
+ the_list.extend(node)
+
+ def tolist(self):
+ """
+ The fastest way to convert the vector into a python list.
+ """
+ the_list = []
+ self._fill_list(self._root, self._shift, the_list)
+ the_list.extend(self._tail)
+ return the_list
+
+ def _totuple(self):
+ """
+ Returns the content as a python tuple.
+ """
+ return tuple(self.tolist())
+
+ def __hash__(self):
+ # Taking the easy way out again...
+ return hash(self._totuple())
+
+ def transform(self, *transformations):
+ return transform(self, transformations)
+
+ def __reduce__(self):
+ # Pickling support
+ return pvector, (self.tolist(),)
+
+ def mset(self, *args):
+ if len(args) % 2:
+ raise TypeError("mset expected an even number of arguments")
+
+ evolver = self.evolver()
+ for i in range(0, len(args), 2):
+ evolver[args[i]] = args[i+1]
+
+ return evolver.persistent()
+
+ class Evolver(object):
+ __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '_dirty_nodes',
+ '_extra_tail', '_cached_leafs', '_orig_pvector')
+
+ def __init__(self, v):
+ self._reset(v)
+
+ def __getitem__(self, index):
+ if not isinstance(index, Integral):
+ raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
+
+ if index < 0:
+ index += self._count + len(self._extra_tail)
+
+ if self._count <= index < self._count + len(self._extra_tail):
+ return self._extra_tail[index - self._count]
+
+ return PythonPVector._node_for(self, index)[index & BIT_MASK]
+
+ def _reset(self, v):
+ self._count = v._count
+ self._shift = v._shift
+ self._root = v._root
+ self._tail = v._tail
+ self._tail_offset = v._tail_offset
+ self._dirty_nodes = {}
+ self._cached_leafs = {}
+ self._extra_tail = []
+ self._orig_pvector = v
+
+ def append(self, element):
+ self._extra_tail.append(element)
+ return self
+
+ def extend(self, iterable):
+ self._extra_tail.extend(iterable)
+ return self
+
+ def set(self, index, val):
+ self[index] = val
+ return self
+
+ def __setitem__(self, index, val):
+ if not isinstance(index, Integral):
+ raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
+
+ if index < 0:
+ index += self._count + len(self._extra_tail)
+
+ if 0 <= index < self._count:
+ node = self._cached_leafs.get(index >> SHIFT)
+ if node:
+ node[index & BIT_MASK] = val
+ elif index >= self._tail_offset:
+ if id(self._tail) not in self._dirty_nodes:
+ self._tail = list(self._tail)
+ self._dirty_nodes[id(self._tail)] = True
+ self._cached_leafs[index >> SHIFT] = self._tail
+ self._tail[index & BIT_MASK] = val
+ else:
+ self._root = self._do_set(self._shift, self._root, index, val)
+ elif self._count <= index < self._count + len(self._extra_tail):
+ self._extra_tail[index - self._count] = val
+ elif index == self._count + len(self._extra_tail):
+ self._extra_tail.append(val)
+ else:
+ raise IndexError("Index out of range: %s" % (index,))
+
+ def _do_set(self, level, node, i, val):
+ if id(node) in self._dirty_nodes:
+ ret = node
+ else:
+ ret = list(node)
+ self._dirty_nodes[id(ret)] = True
+
+ if level == 0:
+ ret[i & BIT_MASK] = val
+ self._cached_leafs[i >> SHIFT] = ret
+ else:
+ sub_index = (i >> level) & BIT_MASK # >>>
+ ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val)
+
+ return ret
+
+ def delete(self, index):
+ del self[index]
+ return self
+
+ def __delitem__(self, key):
+ if self._orig_pvector:
+ # All structural sharing bets are off, base evolver on _extra_tail only
+ l = PythonPVector(self._count, self._shift, self._root, self._tail).tolist()
+ l.extend(self._extra_tail)
+ self._reset(_EMPTY_PVECTOR)
+ self._extra_tail = l
+
+ del self._extra_tail[key]
+
+ def persistent(self):
+ result = self._orig_pvector
+ if self.is_dirty():
+ result = PythonPVector(self._count, self._shift, self._root, self._tail).extend(self._extra_tail)
+ self._reset(result)
+
+ return result
+
+ def __len__(self):
+ return self._count + len(self._extra_tail)
+
+ def is_dirty(self):
+ return bool(self._dirty_nodes or self._extra_tail)
+
+ def evolver(self):
+ return PythonPVector.Evolver(self)
+
+ def set(self, i, val):
+ # This method could be implemented by a call to mset() but doing so would cause
+ # a ~5 X performance penalty on PyPy (considered the primary platform for this implementation
+ # of PVector) so we're keeping this implementation for now.
+
+ if not isinstance(i, Integral):
+ raise TypeError("'%s' object cannot be interpreted as an index" % type(i).__name__)
+
+ if i < 0:
+ i += self._count
+
+ if 0 <= i < self._count:
+ if i >= self._tail_offset:
+ new_tail = list(self._tail)
+ new_tail[i & BIT_MASK] = val
+ return PythonPVector(self._count, self._shift, self._root, new_tail)
+
+ return PythonPVector(self._count, self._shift, self._do_set(self._shift, self._root, i, val), self._tail)
+
+ if i == self._count:
+ return self.append(val)
+
+ raise IndexError("Index out of range: %s" % (i,))
+
+ def _do_set(self, level, node, i, val):
+ ret = list(node)
+ if level == 0:
+ ret[i & BIT_MASK] = val
+ else:
+ sub_index = (i >> level) & BIT_MASK # >>>
+ ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val)
+
+ return ret
+
+ @staticmethod
+ def _node_for(pvector_like, i):
+ if 0 <= i < pvector_like._count:
+ if i >= pvector_like._tail_offset:
+ return pvector_like._tail
+
+ node = pvector_like._root
+ for level in range(pvector_like._shift, 0, -SHIFT):
+ node = node[(i >> level) & BIT_MASK] # >>>
+
+ return node
+
+ raise IndexError("Index out of range: %s" % (i,))
+
+ def _create_new_root(self):
+ new_shift = self._shift
+
+ # Overflow root?
+ if (self._count >> SHIFT) > (1 << self._shift): # >>>
+ new_root = [self._root, self._new_path(self._shift, self._tail)]
+ new_shift += SHIFT
+ else:
+ new_root = self._push_tail(self._shift, self._root, self._tail)
+
+ return new_root, new_shift
+
+ def append(self, val):
+ if len(self._tail) < BRANCH_FACTOR:
+ new_tail = list(self._tail)
+ new_tail.append(val)
+ return PythonPVector(self._count + 1, self._shift, self._root, new_tail)
+
+ # Full tail, push into tree
+ new_root, new_shift = self._create_new_root()
+ return PythonPVector(self._count + 1, new_shift, new_root, [val])
+
+ def _new_path(self, level, node):
+ if level == 0:
+ return node
+
+ return [self._new_path(level - SHIFT, node)]
+
+ def _mutating_insert_tail(self):
+ self._root, self._shift = self._create_new_root()
+ self._tail = []
+
+ def _mutating_fill_tail(self, offset, sequence):
+ max_delta_len = BRANCH_FACTOR - len(self._tail)
+ delta = sequence[offset:offset + max_delta_len]
+ self._tail.extend(delta)
+ delta_len = len(delta)
+ self._count += delta_len
+ return offset + delta_len
+
+ def _mutating_extend(self, sequence):
+ offset = 0
+ sequence_len = len(sequence)
+ while offset < sequence_len:
+ offset = self._mutating_fill_tail(offset, sequence)
+ if len(self._tail) == BRANCH_FACTOR:
+ self._mutating_insert_tail()
+
+ self._tail_offset = self._count - len(self._tail)
+
+ def extend(self, obj):
+ # Mutates the new vector directly for efficiency but that's only an
+ # implementation detail, once it is returned it should be considered immutable
+ l = obj.tolist() if isinstance(obj, PythonPVector) else list(obj)
+ if l:
+ new_vector = self.append(l[0])
+ new_vector._mutating_extend(l[1:])
+ return new_vector
+
+ return self
+
+ def _push_tail(self, level, parent, tail_node):
+ """
+ if parent is leaf, insert node,
+ else does it map to an existing child? ->
+ node_to_insert = push node one more level
+ else alloc new path
+
+ return node_to_insert placed in copy of parent
+ """
+ ret = list(parent)
+
+ if level == SHIFT:
+ ret.append(tail_node)
+ return ret
+
+ sub_index = ((self._count - 1) >> level) & BIT_MASK # >>>
+ if len(parent) > sub_index:
+ ret[sub_index] = self._push_tail(level - SHIFT, parent[sub_index], tail_node)
+ return ret
+
+ ret.append(self._new_path(level - SHIFT, tail_node))
+ return ret
+
+ def index(self, value, *args, **kwargs):
+ return self.tolist().index(value, *args, **kwargs)
+
+ def count(self, value):
+ return self.tolist().count(value)
+
+ def delete(self, index, stop=None):
+ l = self.tolist()
+ del l[_index_or_slice(index, stop)]
+ return _EMPTY_PVECTOR.extend(l)
+
+ def remove(self, value):
+ l = self.tolist()
+ l.remove(value)
+ return _EMPTY_PVECTOR.extend(l)
+
+@six.add_metaclass(ABCMeta)
+class PVector(object):
+    """
+    Persistent vector implementation. Meant as a replacement for the cases where you would normally
+    use a Python list.
+
+    Do not instantiate directly, instead use the factory functions :py:func:`v` and :py:func:`pvector` to
+    create an instance.
+
+    Heavily influenced by the persistent vector available in Clojure. Initially this was more or
+    less just a port of the Java code for the Clojure vector. It has since been modified and to
+    some extent optimized for usage in Python.
+
+    The vector is organized as a trie, any mutating method will return a new vector that contains the changes. No
+    updates are done to the original vector. Structural sharing between vectors is applied where possible to save
+    space and to avoid making complete copies.
+
+    This structure corresponds most closely to the built in list type and is intended as a replacement. Where the
+    semantics are the same (more or less) the same function names have been used but for some cases it is not possible,
+    for example assignments.
+
+    The PVector implements the Sequence protocol and is Hashable.
+
+    Inserts are amortized O(1). Random access is log32(n) where n is the size of the vector.
+
+    The following are examples of some common operations on persistent vectors:
+
+    >>> p = v(1, 2, 3)
+    >>> p2 = p.append(4)
+    >>> p3 = p2.extend([5, 6, 7])
+    >>> p
+    pvector([1, 2, 3])
+    >>> p2
+    pvector([1, 2, 3, 4])
+    >>> p3
+    pvector([1, 2, 3, 4, 5, 6, 7])
+    >>> p3[5]
+    6
+    >>> p.set(1, 99)
+    pvector([1, 99, 3])
+    >>>
+    """
+
+    @abstractmethod
+    def __len__(self):
+        """
+        >>> len(v(1, 2, 3))
+        3
+        """
+
+    @abstractmethod
+    def __getitem__(self, index):
+        """
+        Get value at index. Full slicing support.
+
+        >>> v1 = v(5, 6, 7, 8)
+        >>> v1[2]
+        7
+        >>> v1[1:3]
+        pvector([6, 7])
+        """
+
+    @abstractmethod
+    def __add__(self, other):
+        """
+        >>> v1 = v(1, 2)
+        >>> v2 = v(3, 4)
+        >>> v1 + v2
+        pvector([1, 2, 3, 4])
+        """
+
+    @abstractmethod
+    def __mul__(self, times):
+        """
+        >>> v1 = v(1, 2)
+        >>> 3 * v1
+        pvector([1, 2, 1, 2, 1, 2])
+        """
+
+    @abstractmethod
+    def __hash__(self):
+        """
+        >>> v1 = v(1, 2, 3)
+        >>> v2 = v(1, 2, 3)
+        >>> hash(v1) == hash(v2)
+        True
+        """
+
+    @abstractmethod
+    def evolver(self):
+        """
+        Create a new evolver for this pvector. The evolver acts as a mutable view of the vector
+        with "transaction like" semantics. No part of the underlying vector is updated, it is still
+        fully immutable. Furthermore multiple evolvers created from the same pvector do not
+        interfere with each other.
+
+        You may want to use an evolver instead of working directly with the pvector in the
+        following cases:
+
+        * Multiple updates are done to the same vector and the intermediate results are of no
+          interest. In this case using an evolver may be a more efficient and easier to work with.
+        * You need to pass a vector into a legacy function or a function that you have no control
+          over which performs in place mutations of lists. In this case pass an evolver instance
+          instead and then create a new pvector from the evolver once the function returns.
+
+        The following example illustrates a typical workflow when working with evolvers. It also
+        displays most of the API (which is kept small by design, you should not be tempted to
+        use evolvers in excess ;-)).
+
+        Create the evolver and perform various mutating updates to it:
+
+        >>> v1 = v(1, 2, 3, 4, 5)
+        >>> e = v1.evolver()
+        >>> e[1] = 22
+        >>> _ = e.append(6)
+        >>> _ = e.extend([7, 8, 9])
+        >>> e[8] += 1
+        >>> len(e)
+        9
+
+        The underlying pvector remains the same:
+
+        >>> v1
+        pvector([1, 2, 3, 4, 5])
+
+        The changes are kept in the evolver. An updated pvector can be created using the
+        persistent() function on the evolver.
+
+        >>> v2 = e.persistent()
+        >>> v2
+        pvector([1, 22, 3, 4, 5, 6, 7, 8, 10])
+
+        The new pvector will share data with the original pvector in the same way that would have
+        been done if only using operations on the pvector.
+        """
+
+    @abstractmethod
+    def mset(self, *args):
+        """
+        Return a new vector with elements in specified positions replaced by values (multi set).
+
+        Elements on even positions in the argument list are interpreted as indexes while
+        elements on odd positions are considered values.
+
+        >>> v1 = v(1, 2, 3)
+        >>> v1.mset(0, 11, 2, 33)
+        pvector([11, 2, 33])
+        """
+
+    @abstractmethod
+    def set(self, i, val):
+        """
+        Return a new vector with element at position i replaced with val. The original vector remains unchanged.
+
+        Setting a value one step beyond the end of the vector is equal to appending. Setting beyond that will
+        result in an IndexError.
+
+        >>> v1 = v(1, 2, 3)
+        >>> v1.set(1, 4)
+        pvector([1, 4, 3])
+        >>> v1.set(3, 4)
+        pvector([1, 2, 3, 4])
+        >>> v1.set(-1, 4)
+        pvector([1, 2, 4])
+        """
+
+    @abstractmethod
+    def append(self, val):
+        """
+        Return a new vector with val appended.
+
+        >>> v1 = v(1, 2)
+        >>> v1.append(3)
+        pvector([1, 2, 3])
+        """
+
+    @abstractmethod
+    def extend(self, obj):
+        """
+        Return a new vector with all values in obj appended to it. Obj may be another
+        PVector or any other Iterable.
+
+        >>> v1 = v(1, 2, 3)
+        >>> v1.extend([4, 5])
+        pvector([1, 2, 3, 4, 5])
+        """
+
+    @abstractmethod
+    def index(self, value, *args, **kwargs):
+        """
+        Return first index of value. Additional indexes may be supplied to limit the search to a
+        sub range of the vector.
+
+        >>> v1 = v(1, 2, 3, 4, 3)
+        >>> v1.index(3)
+        2
+        >>> v1.index(3, 3, 5)
+        4
+        """
+
+    @abstractmethod
+    def count(self, value):
+        """
+        Return the number of times that value appears in the vector.
+
+        >>> v1 = v(1, 4, 3, 4)
+        >>> v1.count(4)
+        2
+        """
+
+    @abstractmethod
+    def transform(self, *transformations):
+        """
+        Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
+        consists of two parts. One match expression that specifies which elements to transform
+        and one transformation function that performs the actual transformation.
+
+        >>> from pyrsistent import freeze, ny
+        >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
+        ...                                   {'author': 'Steve', 'content': 'A slightly longer article'}],
+        ...                      'weather': {'temperature': '11C', 'wind': '5m/s'}})
+        >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
+        >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
+        >>> very_short_news.articles[0].content
+        'A short article'
+        >>> very_short_news.articles[1].content
+        'A slightly long...'
+
+        When nothing has been transformed the original data structure is kept
+
+        >>> short_news is news_paper
+        True
+        >>> very_short_news is news_paper
+        False
+        >>> very_short_news.articles[0] is news_paper.articles[0]
+        True
+        """
+
+    @abstractmethod
+    def delete(self, index, stop=None):
+        """
+        Delete a portion of the vector by index or range.
+
+        >>> v1 = v(1, 2, 3, 4, 5)
+        >>> v1.delete(1)
+        pvector([1, 3, 4, 5])
+        >>> v1.delete(1, 3)
+        pvector([1, 4, 5])
+        """
+
+    @abstractmethod
+    def remove(self, value):
+        """
+        Remove the first occurrence of a value from the vector.
+
+        >>> v1 = v(1, 2, 3, 2, 1)
+        >>> v2 = v1.remove(1)
+        >>> v2
+        pvector([2, 3, 2, 1])
+        >>> v2.remove(1)
+        pvector([2, 3, 2])
+        """
+
+
+# Canonical empty vector: every new pvector is grown from this shared instance.
+_EMPTY_PVECTOR = PythonPVector(0, SHIFT, [], [])
+# The pure Python trie implementation satisfies the PVector ABC, and PVector
+# in turn is registered as a virtual subclass of Sequence and Hashable.
+PVector.register(PythonPVector)
+Sequence.register(PVector)
+Hashable.register(PVector)
+
+def python_pvector(iterable=()):
+    """
+    Create a new persistent vector containing the elements in iterable.
+
+    >>> v1 = pvector([1, 2, 3])
+    >>> v1
+    pvector([1, 2, 3])
+    """
+    # Extending the shared empty vector maximizes structural sharing.
+    return _EMPTY_PVECTOR.extend(iterable)
+
+try:
+    # Use the C extension as underlying trie implementation if it is available
+    import os
+    if os.environ.get('PYRSISTENT_NO_C_EXTENSION'):
+        # Environment-variable opt-out: force the pure Python implementation
+        # even when the C extension is installed.
+        pvector = python_pvector
+    else:
+        from pvectorc import pvector
+        # Register the C extension's vector type so isinstance(x, PVector) holds.
+        PVector.register(type(pvector()))
+except ImportError:
+    # C extension not built/installed: fall back to the pure Python implementation.
+    pvector = python_pvector
+
+
+def v(*elements):
+    """
+    Create a new persistent vector containing all parameters to this function.
+
+    >>> v1 = v(1, 2, 3)
+    >>> v1
+    pvector([1, 2, 3])
+    """
+    # Delegates to whichever pvector implementation (C or pure Python) was selected above.
+    return pvector(elements)
diff --git a/lib/spack/external/pyrsistent/_transformations.py b/lib/spack/external/pyrsistent/_transformations.py
new file mode 100644
index 0000000000..612098969b
--- /dev/null
+++ b/lib/spack/external/pyrsistent/_transformations.py
@@ -0,0 +1,143 @@
+import re
+import six
+try:
+ from inspect import Parameter, signature
+except ImportError:
+ signature = None
+ try:
+ from inspect import getfullargspec as getargspec
+ except ImportError:
+ from inspect import getargspec
+
+
+# Unique marker meaning "key not present in the structure"; a fresh object()
+# can never collide with a value supplied by the user.
+_EMPTY_SENTINEL = object()
+
+
+def inc(x):
+    """ Transform command: add one to the current value """
+    return x + 1
+
+
+def dec(x):
+    """ Transform command: subtract one from the current value """
+    return x - 1
+
+
+def discard(evolver, key):
+    """ Transform command: delete ``key`` from the evolver, silently ignoring missing keys """
+    try:
+        del evolver[key]
+    except KeyError:
+        # Best effort: discarding an absent key is a no-op by design.
+        pass
+
+
+# Matchers
+def rex(expr):
+    """ Regular expression matcher to use together with transform functions """
+    r = re.compile(expr)  # compile once; the returned matcher reuses the pattern
+    return lambda key: isinstance(key, six.string_types) and r.match(key)  # non-string keys never match
+
+
+def ny(_):
+    """ Matcher that matches any value; acts as a wildcard in transform paths """
+    return True
+
+
+# Support functions
+def _chunks(l, n):
+    """ Yield successive n-sized slices of sequence l """
+    for i in range(0, len(l), n):
+        yield l[i:i + n]
+
+
+def transform(structure, transformations):
+    """ Apply the given transformations to structure and return the resulting structure.
+
+    ``transformations`` is a flat sequence of alternating (path, command) pairs.
+    """
+    r = structure
+    for path, command in _chunks(transformations, 2):  # pair up path and command
+        r = _do_to_path(r, path, command)
+    return r
+
+
+def _do_to_path(structure, path, command):
+    """ Recursively apply command to the elements of structure selected by path """
+    if not path:
+        # End of the path: apply the command, or substitute a constant value.
+        return command(structure) if callable(command) else command
+
+    kvs = _get_keys_and_values(structure, path[0])
+    return _update_structure(structure, kvs, path[1:], command)
+
+
+def _items(structure):
+    """ Return (key, value) pairs for mappings, or (index, value) pairs for sequences """
+    try:
+        return structure.items()
+    except AttributeError:
+        # Support wider range of structures by adding a transform_items() or similar?
+        return list(enumerate(structure))
+
+
+def _get(structure, key, default):
+    """ Look up key in structure by subscription or attribute access, falling back to default """
+    try:
+        if hasattr(structure, '__getitem__'):
+            return structure[key]
+
+        return getattr(structure, key)
+
+    # NOTE(review): only index/key misses yield the default; an AttributeError
+    # raised by getattr propagates to the caller.
+    except (IndexError, KeyError):
+        return default
+
+
+def _get_keys_and_values(structure, key_spec):
+    """ Resolve one path element to the list of (key, value) pairs it selects in structure """
+    if callable(key_spec):
+        # Support predicates as callable objects in the path
+        arity = _get_arity(key_spec)
+        if arity == 1:
+            # Unary predicates are called with the "key" of the path
+            # - eg a key in a mapping, an index in a sequence.
+            return [(k, v) for k, v in _items(structure) if key_spec(k)]
+        elif arity == 2:
+            # Binary predicates are called with the key and the corresponding
+            # value.
+            return [(k, v) for k, v in _items(structure) if key_spec(k, v)]
+        else:
+            # Other arities are an error.
+            raise ValueError(
+                "callable in transform path must take 1 or 2 arguments"
+            )
+
+    # Non-callables are used as-is as a key.
+    return [(key_spec, _get(structure, key_spec, _EMPTY_SENTINEL))]
+
+
+if signature is None:
+    # inspect.signature is unavailable (import failed above): fall back to
+    # (full)argspec-based counting.
+    def _get_arity(f):
+        """ Number of required positional arguments of f """
+        argspec = getargspec(f)
+        return len(argspec.args) - len(argspec.defaults or ())
+else:
+    def _get_arity(f):
+        """ Number of required positional arguments of f """
+        # Count parameters that have no default and can be passed positionally.
+        return sum(
+            1
+            for p
+            in signature(f).parameters.values()
+            if p.default is Parameter.empty
+            and p.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)
+        )
+
+
+def _update_structure(structure, kvs, path, command):
+    """ Return a new persistent structure where the selected (key, value) pairs have been
+    replaced by the result of applying command along the remaining path """
+    from pyrsistent._pmap import pmap
+    e = structure.evolver()
+    if not path and command is discard:
+        # Do this in reverse to avoid index problems with vectors. See #92.
+        for k, v in reversed(kvs):
+            discard(e, k)
+    else:
+        for k, v in kvs:
+            is_empty = False
+            if v is _EMPTY_SENTINEL:
+                # Allow expansion of structure but make sure to cover the case
+                # when an empty pmap is added as leaf node. See #154.
+                is_empty = True
+                v = pmap()
+
+            result = _do_to_path(v, path, command)
+            if result is not v or is_empty:
+                # Only write back changed values to preserve structural sharing.
+                e[k] = result
+
+    return e.persistent()
diff --git a/lib/spack/external/_pytest/LICENSE b/lib/spack/external/pytest-fallback/_pytest/LICENSE
index 629df45ac4..629df45ac4 100644
--- a/lib/spack/external/_pytest/LICENSE
+++ b/lib/spack/external/pytest-fallback/_pytest/LICENSE
diff --git a/lib/spack/external/_pytest/__init__.py b/lib/spack/external/pytest-fallback/_pytest/__init__.py
index 6e41f0504e..6e41f0504e 100644
--- a/lib/spack/external/_pytest/__init__.py
+++ b/lib/spack/external/pytest-fallback/_pytest/__init__.py
diff --git a/lib/spack/external/_pytest/_argcomplete.py b/lib/spack/external/pytest-fallback/_pytest/_argcomplete.py
index 965ec79513..965ec79513 100644
--- a/lib/spack/external/_pytest/_argcomplete.py
+++ b/lib/spack/external/pytest-fallback/_pytest/_argcomplete.py
diff --git a/lib/spack/external/_pytest/_code/__init__.py b/lib/spack/external/pytest-fallback/_pytest/_code/__init__.py
index 815c13b42c..815c13b42c 100644
--- a/lib/spack/external/_pytest/_code/__init__.py
+++ b/lib/spack/external/pytest-fallback/_pytest/_code/__init__.py
diff --git a/lib/spack/external/_pytest/_code/_py2traceback.py b/lib/spack/external/pytest-fallback/_pytest/_code/_py2traceback.py
index 5aacf0a428..5aacf0a428 100644
--- a/lib/spack/external/_pytest/_code/_py2traceback.py
+++ b/lib/spack/external/pytest-fallback/_pytest/_code/_py2traceback.py
diff --git a/lib/spack/external/_pytest/_code/code.py b/lib/spack/external/pytest-fallback/_pytest/_code/code.py
index f3b7eedfce..f3b7eedfce 100644
--- a/lib/spack/external/_pytest/_code/code.py
+++ b/lib/spack/external/pytest-fallback/_pytest/_code/code.py
diff --git a/lib/spack/external/_pytest/_code/source.py b/lib/spack/external/pytest-fallback/_pytest/_code/source.py
index fc41712649..fc41712649 100644
--- a/lib/spack/external/_pytest/_code/source.py
+++ b/lib/spack/external/pytest-fallback/_pytest/_code/source.py
diff --git a/lib/spack/external/_pytest/_pluggy.py b/lib/spack/external/pytest-fallback/_pytest/_pluggy.py
index 6cc1d3d54a..6cc1d3d54a 100644
--- a/lib/spack/external/_pytest/_pluggy.py
+++ b/lib/spack/external/pytest-fallback/_pytest/_pluggy.py
diff --git a/lib/spack/external/_pytest/_version.py b/lib/spack/external/pytest-fallback/_pytest/_version.py
index 3edb7da9ad..3edb7da9ad 100644
--- a/lib/spack/external/_pytest/_version.py
+++ b/lib/spack/external/pytest-fallback/_pytest/_version.py
diff --git a/lib/spack/external/_pytest/assertion/__init__.py b/lib/spack/external/pytest-fallback/_pytest/assertion/__init__.py
index b0ef667d56..b0ef667d56 100644
--- a/lib/spack/external/_pytest/assertion/__init__.py
+++ b/lib/spack/external/pytest-fallback/_pytest/assertion/__init__.py
diff --git a/lib/spack/external/_pytest/assertion/rewrite.py b/lib/spack/external/pytest-fallback/_pytest/assertion/rewrite.py
index d48b6648fb..d48b6648fb 100644
--- a/lib/spack/external/_pytest/assertion/rewrite.py
+++ b/lib/spack/external/pytest-fallback/_pytest/assertion/rewrite.py
diff --git a/lib/spack/external/_pytest/assertion/truncate.py b/lib/spack/external/pytest-fallback/_pytest/assertion/truncate.py
index 1e13063569..1e13063569 100644
--- a/lib/spack/external/_pytest/assertion/truncate.py
+++ b/lib/spack/external/pytest-fallback/_pytest/assertion/truncate.py
diff --git a/lib/spack/external/_pytest/assertion/util.py b/lib/spack/external/pytest-fallback/_pytest/assertion/util.py
index c09eff06b0..c09eff06b0 100644
--- a/lib/spack/external/_pytest/assertion/util.py
+++ b/lib/spack/external/pytest-fallback/_pytest/assertion/util.py
diff --git a/lib/spack/external/_pytest/cacheprovider.py b/lib/spack/external/pytest-fallback/_pytest/cacheprovider.py
index c537c14472..c537c14472 100755
--- a/lib/spack/external/_pytest/cacheprovider.py
+++ b/lib/spack/external/pytest-fallback/_pytest/cacheprovider.py
diff --git a/lib/spack/external/_pytest/capture.py b/lib/spack/external/pytest-fallback/_pytest/capture.py
index cb5af6fcb3..cb5af6fcb3 100644
--- a/lib/spack/external/_pytest/capture.py
+++ b/lib/spack/external/pytest-fallback/_pytest/capture.py
diff --git a/lib/spack/external/_pytest/compat.py b/lib/spack/external/pytest-fallback/_pytest/compat.py
index 255f69ce0d..255f69ce0d 100644
--- a/lib/spack/external/_pytest/compat.py
+++ b/lib/spack/external/pytest-fallback/_pytest/compat.py
diff --git a/lib/spack/external/_pytest/config.py b/lib/spack/external/pytest-fallback/_pytest/config.py
index 513478a972..513478a972 100644
--- a/lib/spack/external/_pytest/config.py
+++ b/lib/spack/external/pytest-fallback/_pytest/config.py
diff --git a/lib/spack/external/_pytest/debugging.py b/lib/spack/external/pytest-fallback/_pytest/debugging.py
index aa9c9a3863..aa9c9a3863 100644
--- a/lib/spack/external/_pytest/debugging.py
+++ b/lib/spack/external/pytest-fallback/_pytest/debugging.py
diff --git a/lib/spack/external/_pytest/deprecated.py b/lib/spack/external/pytest-fallback/_pytest/deprecated.py
index 38e9496778..38e9496778 100644
--- a/lib/spack/external/_pytest/deprecated.py
+++ b/lib/spack/external/pytest-fallback/_pytest/deprecated.py
diff --git a/lib/spack/external/_pytest/doctest.py b/lib/spack/external/pytest-fallback/_pytest/doctest.py
index 4c05acddf7..4c05acddf7 100644
--- a/lib/spack/external/_pytest/doctest.py
+++ b/lib/spack/external/pytest-fallback/_pytest/doctest.py
diff --git a/lib/spack/external/_pytest/fixtures.py b/lib/spack/external/pytest-fallback/_pytest/fixtures.py
index 98317a4889..7ad495615e 100644
--- a/lib/spack/external/_pytest/fixtures.py
+++ b/lib/spack/external/pytest-fallback/_pytest/fixtures.py
@@ -1,5 +1,6 @@
from __future__ import absolute_import, division, print_function
+import collections
import inspect
import sys
import warnings
@@ -21,9 +22,6 @@ from _pytest.compat import (
from _pytest.outcomes import fail, TEST_OUTCOME
-from ordereddict_backport import OrderedDict
-
-
def pytest_sessionstart(session):
import _pytest.python
scopename2class.update({
@@ -165,7 +163,7 @@ def reorder_items(items):
for scopenum in range(0, scopenum_function):
argkeys_cache[scopenum] = d = {}
for item in items:
- keys = OrderedDict.fromkeys(get_parametrized_fixture_keys(item, scopenum))
+ keys = collections.OrderedDict.fromkeys(get_parametrized_fixture_keys(item, scopenum))
if keys:
d[item] = keys
return reorder_items_atscope(items, set(), argkeys_cache, 0)
@@ -200,7 +198,7 @@ def slice_items(items, ignore, scoped_argkeys_cache):
for i, item in enumerate(it):
argkeys = scoped_argkeys_cache.get(item)
if argkeys is not None:
- newargkeys = OrderedDict.fromkeys(k for k in argkeys if k not in ignore)
+ newargkeys = collections.OrderedDict.fromkeys(k for k in argkeys if k not in ignore)
if newargkeys: # found a slicing key
slicing_argkey, _ = newargkeys.popitem()
items_before = items[:i]
diff --git a/lib/spack/external/_pytest/freeze_support.py b/lib/spack/external/pytest-fallback/_pytest/freeze_support.py
index 97147a8825..97147a8825 100644
--- a/lib/spack/external/_pytest/freeze_support.py
+++ b/lib/spack/external/pytest-fallback/_pytest/freeze_support.py
diff --git a/lib/spack/external/_pytest/helpconfig.py b/lib/spack/external/pytest-fallback/_pytest/helpconfig.py
index e744637f86..e744637f86 100644
--- a/lib/spack/external/_pytest/helpconfig.py
+++ b/lib/spack/external/pytest-fallback/_pytest/helpconfig.py
diff --git a/lib/spack/external/_pytest/hookspec.py b/lib/spack/external/pytest-fallback/_pytest/hookspec.py
index e5c966e58b..e5c966e58b 100644
--- a/lib/spack/external/_pytest/hookspec.py
+++ b/lib/spack/external/pytest-fallback/_pytest/hookspec.py
diff --git a/lib/spack/external/_pytest/junitxml.py b/lib/spack/external/pytest-fallback/_pytest/junitxml.py
index 7fb40dc354..7fb40dc354 100644
--- a/lib/spack/external/_pytest/junitxml.py
+++ b/lib/spack/external/pytest-fallback/_pytest/junitxml.py
diff --git a/lib/spack/external/_pytest/main.py b/lib/spack/external/pytest-fallback/_pytest/main.py
index 98aa28eb34..98aa28eb34 100644
--- a/lib/spack/external/_pytest/main.py
+++ b/lib/spack/external/pytest-fallback/_pytest/main.py
diff --git a/lib/spack/external/_pytest/mark.py b/lib/spack/external/pytest-fallback/_pytest/mark.py
index 454722ca2c..454722ca2c 100644
--- a/lib/spack/external/_pytest/mark.py
+++ b/lib/spack/external/pytest-fallback/_pytest/mark.py
diff --git a/lib/spack/external/_pytest/monkeypatch.py b/lib/spack/external/pytest-fallback/_pytest/monkeypatch.py
index 39ac770135..39ac770135 100644
--- a/lib/spack/external/_pytest/monkeypatch.py
+++ b/lib/spack/external/pytest-fallback/_pytest/monkeypatch.py
diff --git a/lib/spack/external/_pytest/nodes.py b/lib/spack/external/pytest-fallback/_pytest/nodes.py
index ad3af2ce67..ad3af2ce67 100644
--- a/lib/spack/external/_pytest/nodes.py
+++ b/lib/spack/external/pytest-fallback/_pytest/nodes.py
diff --git a/lib/spack/external/_pytest/nose.py b/lib/spack/external/pytest-fallback/_pytest/nose.py
index d246c5603d..d246c5603d 100644
--- a/lib/spack/external/_pytest/nose.py
+++ b/lib/spack/external/pytest-fallback/_pytest/nose.py
diff --git a/lib/spack/external/_pytest/outcomes.py b/lib/spack/external/pytest-fallback/_pytest/outcomes.py
index ff5ef756d9..ff5ef756d9 100644
--- a/lib/spack/external/_pytest/outcomes.py
+++ b/lib/spack/external/pytest-fallback/_pytest/outcomes.py
diff --git a/lib/spack/external/_pytest/pastebin.py b/lib/spack/external/pytest-fallback/_pytest/pastebin.py
index 9d689819f0..9d689819f0 100644
--- a/lib/spack/external/_pytest/pastebin.py
+++ b/lib/spack/external/pytest-fallback/_pytest/pastebin.py
diff --git a/lib/spack/external/_pytest/pytester.py b/lib/spack/external/pytest-fallback/_pytest/pytester.py
index 82aa00e0d2..82aa00e0d2 100644
--- a/lib/spack/external/_pytest/pytester.py
+++ b/lib/spack/external/pytest-fallback/_pytest/pytester.py
diff --git a/lib/spack/external/_pytest/python.py b/lib/spack/external/pytest-fallback/_pytest/python.py
index 41fd2bdb7f..41fd2bdb7f 100644
--- a/lib/spack/external/_pytest/python.py
+++ b/lib/spack/external/pytest-fallback/_pytest/python.py
diff --git a/lib/spack/external/_pytest/python_api.py b/lib/spack/external/pytest-fallback/_pytest/python_api.py
index a931b4d2c7..a931b4d2c7 100644
--- a/lib/spack/external/_pytest/python_api.py
+++ b/lib/spack/external/pytest-fallback/_pytest/python_api.py
diff --git a/lib/spack/external/_pytest/recwarn.py b/lib/spack/external/pytest-fallback/_pytest/recwarn.py
index c9fa872c07..c9fa872c07 100644
--- a/lib/spack/external/_pytest/recwarn.py
+++ b/lib/spack/external/pytest-fallback/_pytest/recwarn.py
diff --git a/lib/spack/external/_pytest/resultlog.py b/lib/spack/external/pytest-fallback/_pytest/resultlog.py
index 9f9c2d1f65..9f9c2d1f65 100644
--- a/lib/spack/external/_pytest/resultlog.py
+++ b/lib/spack/external/pytest-fallback/_pytest/resultlog.py
diff --git a/lib/spack/external/_pytest/runner.py b/lib/spack/external/pytest-fallback/_pytest/runner.py
index b643fa3c91..b643fa3c91 100644
--- a/lib/spack/external/_pytest/runner.py
+++ b/lib/spack/external/pytest-fallback/_pytest/runner.py
diff --git a/lib/spack/external/_pytest/setuponly.py b/lib/spack/external/pytest-fallback/_pytest/setuponly.py
index 15e195ad5a..15e195ad5a 100644
--- a/lib/spack/external/_pytest/setuponly.py
+++ b/lib/spack/external/pytest-fallback/_pytest/setuponly.py
diff --git a/lib/spack/external/_pytest/setupplan.py b/lib/spack/external/pytest-fallback/_pytest/setupplan.py
index e11bd40698..e11bd40698 100644
--- a/lib/spack/external/_pytest/setupplan.py
+++ b/lib/spack/external/pytest-fallback/_pytest/setupplan.py
diff --git a/lib/spack/external/_pytest/skipping.py b/lib/spack/external/pytest-fallback/_pytest/skipping.py
index b92800d10b..b92800d10b 100644
--- a/lib/spack/external/_pytest/skipping.py
+++ b/lib/spack/external/pytest-fallback/_pytest/skipping.py
diff --git a/lib/spack/external/_pytest/terminal.py b/lib/spack/external/pytest-fallback/_pytest/terminal.py
index 9da94d0c91..9da94d0c91 100644
--- a/lib/spack/external/_pytest/terminal.py
+++ b/lib/spack/external/pytest-fallback/_pytest/terminal.py
diff --git a/lib/spack/external/_pytest/tmpdir.py b/lib/spack/external/pytest-fallback/_pytest/tmpdir.py
index da1b032237..da1b032237 100644
--- a/lib/spack/external/_pytest/tmpdir.py
+++ b/lib/spack/external/pytest-fallback/_pytest/tmpdir.py
diff --git a/lib/spack/external/_pytest/unittest.py b/lib/spack/external/pytest-fallback/_pytest/unittest.py
index 52c9813e8b..52c9813e8b 100644
--- a/lib/spack/external/_pytest/unittest.py
+++ b/lib/spack/external/pytest-fallback/_pytest/unittest.py
diff --git a/lib/spack/external/_pytest/vendored_packages/README.md b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/README.md
index b5fe6febb0..b5fe6febb0 100644
--- a/lib/spack/external/_pytest/vendored_packages/README.md
+++ b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/README.md
diff --git a/lib/spack/external/_pytest/vendored_packages/__init__.py b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/__init__.py
index e69de29bb2..e69de29bb2 100644
--- a/lib/spack/external/_pytest/vendored_packages/__init__.py
+++ b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/__init__.py
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst
index da0e7a6ed7..da0e7a6ed7 100644
--- a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst
+++ b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER
index a1b589e38a..a1b589e38a 100644
--- a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER
+++ b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt
index 121017d086..121017d086 100644
--- a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt
+++ b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA
index bd88517c94..bd88517c94 100644
--- a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA
+++ b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD
index 3003a3bf2b..3003a3bf2b 100644
--- a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD
+++ b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL
index 8b6dd1b5a8..8b6dd1b5a8 100644
--- a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL
+++ b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json
index cde22aff02..cde22aff02 100644
--- a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json
+++ b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt
index 11bdb5c1f5..11bdb5c1f5 100644
--- a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt
+++ b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy.py b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy.py
index 6f26552d73..6f26552d73 100644
--- a/lib/spack/external/_pytest/vendored_packages/pluggy.py
+++ b/lib/spack/external/pytest-fallback/_pytest/vendored_packages/pluggy.py
diff --git a/lib/spack/external/_pytest/warnings.py b/lib/spack/external/pytest-fallback/_pytest/warnings.py
index 926b1f5811..926b1f5811 100644
--- a/lib/spack/external/_pytest/warnings.py
+++ b/lib/spack/external/pytest-fallback/_pytest/warnings.py
diff --git a/lib/spack/external/py/__init__.py b/lib/spack/external/pytest-fallback/py/__init__.py
index 85af650f5c..85af650f5c 100644
--- a/lib/spack/external/py/__init__.py
+++ b/lib/spack/external/pytest-fallback/py/__init__.py
diff --git a/lib/spack/external/py/__metainfo.py b/lib/spack/external/pytest-fallback/py/__metainfo.py
index 12581eb7af..12581eb7af 100644
--- a/lib/spack/external/py/__metainfo.py
+++ b/lib/spack/external/pytest-fallback/py/__metainfo.py
diff --git a/lib/spack/external/py/_apipkg.py b/lib/spack/external/pytest-fallback/py/_apipkg.py
index a73b8f6d0b..a73b8f6d0b 100644
--- a/lib/spack/external/py/_apipkg.py
+++ b/lib/spack/external/pytest-fallback/py/_apipkg.py
diff --git a/lib/spack/external/py/_builtin.py b/lib/spack/external/pytest-fallback/py/_builtin.py
index 52ee9d79ca..52ee9d79ca 100644
--- a/lib/spack/external/py/_builtin.py
+++ b/lib/spack/external/pytest-fallback/py/_builtin.py
diff --git a/lib/spack/external/py/_code/__init__.py b/lib/spack/external/pytest-fallback/py/_code/__init__.py
index f15acf8513..f15acf8513 100644
--- a/lib/spack/external/py/_code/__init__.py
+++ b/lib/spack/external/pytest-fallback/py/_code/__init__.py
diff --git a/lib/spack/external/py/_code/_assertionnew.py b/lib/spack/external/pytest-fallback/py/_code/_assertionnew.py
index afb1b31ff0..afb1b31ff0 100644
--- a/lib/spack/external/py/_code/_assertionnew.py
+++ b/lib/spack/external/pytest-fallback/py/_code/_assertionnew.py
diff --git a/lib/spack/external/py/_code/_assertionold.py b/lib/spack/external/pytest-fallback/py/_code/_assertionold.py
index 4e81fb3ef6..4e81fb3ef6 100644
--- a/lib/spack/external/py/_code/_assertionold.py
+++ b/lib/spack/external/pytest-fallback/py/_code/_assertionold.py
diff --git a/lib/spack/external/py/_code/_py2traceback.py b/lib/spack/external/pytest-fallback/py/_code/_py2traceback.py
index d65e27cb73..d65e27cb73 100644
--- a/lib/spack/external/py/_code/_py2traceback.py
+++ b/lib/spack/external/pytest-fallback/py/_code/_py2traceback.py
diff --git a/lib/spack/external/py/_code/assertion.py b/lib/spack/external/pytest-fallback/py/_code/assertion.py
index 4ce80c75b1..4ce80c75b1 100644
--- a/lib/spack/external/py/_code/assertion.py
+++ b/lib/spack/external/pytest-fallback/py/_code/assertion.py
diff --git a/lib/spack/external/py/_code/code.py b/lib/spack/external/pytest-fallback/py/_code/code.py
index 20fd965c97..20fd965c97 100644
--- a/lib/spack/external/py/_code/code.py
+++ b/lib/spack/external/pytest-fallback/py/_code/code.py
diff --git a/lib/spack/external/py/_code/source.py b/lib/spack/external/pytest-fallback/py/_code/source.py
index c8b668b2fb..c8b668b2fb 100644
--- a/lib/spack/external/py/_code/source.py
+++ b/lib/spack/external/pytest-fallback/py/_code/source.py
diff --git a/lib/spack/external/py/_error.py b/lib/spack/external/pytest-fallback/py/_error.py
index 8ca339beba..8ca339beba 100644
--- a/lib/spack/external/py/_error.py
+++ b/lib/spack/external/pytest-fallback/py/_error.py
diff --git a/lib/spack/external/py/_iniconfig.py b/lib/spack/external/pytest-fallback/py/_iniconfig.py
index 92b50bd853..92b50bd853 100644
--- a/lib/spack/external/py/_iniconfig.py
+++ b/lib/spack/external/pytest-fallback/py/_iniconfig.py
diff --git a/lib/spack/external/py/_io/__init__.py b/lib/spack/external/pytest-fallback/py/_io/__init__.py
index 835f01f3ab..835f01f3ab 100644
--- a/lib/spack/external/py/_io/__init__.py
+++ b/lib/spack/external/pytest-fallback/py/_io/__init__.py
diff --git a/lib/spack/external/py/_io/capture.py b/lib/spack/external/pytest-fallback/py/_io/capture.py
index bc157ed978..bc157ed978 100644
--- a/lib/spack/external/py/_io/capture.py
+++ b/lib/spack/external/pytest-fallback/py/_io/capture.py
diff --git a/lib/spack/external/py/_io/saferepr.py b/lib/spack/external/pytest-fallback/py/_io/saferepr.py
index 8518290efd..8518290efd 100644
--- a/lib/spack/external/py/_io/saferepr.py
+++ b/lib/spack/external/pytest-fallback/py/_io/saferepr.py
diff --git a/lib/spack/external/py/_io/terminalwriter.py b/lib/spack/external/pytest-fallback/py/_io/terminalwriter.py
index 390e8ca7b9..390e8ca7b9 100644
--- a/lib/spack/external/py/_io/terminalwriter.py
+++ b/lib/spack/external/pytest-fallback/py/_io/terminalwriter.py
diff --git a/lib/spack/external/py/_log/__init__.py b/lib/spack/external/pytest-fallback/py/_log/__init__.py
index fad62e960d..fad62e960d 100644
--- a/lib/spack/external/py/_log/__init__.py
+++ b/lib/spack/external/pytest-fallback/py/_log/__init__.py
diff --git a/lib/spack/external/py/_log/log.py b/lib/spack/external/pytest-fallback/py/_log/log.py
index ce47e8c754..ce47e8c754 100644
--- a/lib/spack/external/py/_log/log.py
+++ b/lib/spack/external/pytest-fallback/py/_log/log.py
diff --git a/lib/spack/external/py/_log/warning.py b/lib/spack/external/pytest-fallback/py/_log/warning.py
index 722e31e910..722e31e910 100644
--- a/lib/spack/external/py/_log/warning.py
+++ b/lib/spack/external/pytest-fallback/py/_log/warning.py
diff --git a/lib/spack/external/py/_path/__init__.py b/lib/spack/external/pytest-fallback/py/_path/__init__.py
index 51f3246f80..51f3246f80 100644
--- a/lib/spack/external/py/_path/__init__.py
+++ b/lib/spack/external/pytest-fallback/py/_path/__init__.py
diff --git a/lib/spack/external/py/_path/cacheutil.py b/lib/spack/external/pytest-fallback/py/_path/cacheutil.py
index 9922504750..9922504750 100644
--- a/lib/spack/external/py/_path/cacheutil.py
+++ b/lib/spack/external/pytest-fallback/py/_path/cacheutil.py
diff --git a/lib/spack/external/py/_path/common.py b/lib/spack/external/pytest-fallback/py/_path/common.py
index 5512e51efe..5512e51efe 100644
--- a/lib/spack/external/py/_path/common.py
+++ b/lib/spack/external/pytest-fallback/py/_path/common.py
diff --git a/lib/spack/external/py/_path/local.py b/lib/spack/external/pytest-fallback/py/_path/local.py
index d2f16b993e..d2f16b993e 100644
--- a/lib/spack/external/py/_path/local.py
+++ b/lib/spack/external/pytest-fallback/py/_path/local.py
diff --git a/lib/spack/external/py/_path/svnurl.py b/lib/spack/external/pytest-fallback/py/_path/svnurl.py
index 6589a71d09..6589a71d09 100644
--- a/lib/spack/external/py/_path/svnurl.py
+++ b/lib/spack/external/pytest-fallback/py/_path/svnurl.py
diff --git a/lib/spack/external/py/_path/svnwc.py b/lib/spack/external/pytest-fallback/py/_path/svnwc.py
index 992223c04a..992223c04a 100644
--- a/lib/spack/external/py/_path/svnwc.py
+++ b/lib/spack/external/pytest-fallback/py/_path/svnwc.py
diff --git a/lib/spack/external/py/_process/__init__.py b/lib/spack/external/pytest-fallback/py/_process/__init__.py
index 86c714ad1a..86c714ad1a 100644
--- a/lib/spack/external/py/_process/__init__.py
+++ b/lib/spack/external/pytest-fallback/py/_process/__init__.py
diff --git a/lib/spack/external/py/_process/cmdexec.py b/lib/spack/external/pytest-fallback/py/_process/cmdexec.py
index f83a249402..f83a249402 100644
--- a/lib/spack/external/py/_process/cmdexec.py
+++ b/lib/spack/external/pytest-fallback/py/_process/cmdexec.py
diff --git a/lib/spack/external/py/_process/forkedfunc.py b/lib/spack/external/pytest-fallback/py/_process/forkedfunc.py
index 1c28530688..1c28530688 100644
--- a/lib/spack/external/py/_process/forkedfunc.py
+++ b/lib/spack/external/pytest-fallback/py/_process/forkedfunc.py
diff --git a/lib/spack/external/py/_process/killproc.py b/lib/spack/external/pytest-fallback/py/_process/killproc.py
index 18e8310b5f..18e8310b5f 100644
--- a/lib/spack/external/py/_process/killproc.py
+++ b/lib/spack/external/pytest-fallback/py/_process/killproc.py
diff --git a/lib/spack/external/py/_std.py b/lib/spack/external/pytest-fallback/py/_std.py
index 97a9853323..97a9853323 100644
--- a/lib/spack/external/py/_std.py
+++ b/lib/spack/external/pytest-fallback/py/_std.py
diff --git a/lib/spack/external/py/_xmlgen.py b/lib/spack/external/pytest-fallback/py/_xmlgen.py
index 1c83545884..1c83545884 100644
--- a/lib/spack/external/py/_xmlgen.py
+++ b/lib/spack/external/pytest-fallback/py/_xmlgen.py
diff --git a/lib/spack/external/py/test.py b/lib/spack/external/pytest-fallback/py/test.py
index aa5beb1789..aa5beb1789 100644
--- a/lib/spack/external/py/test.py
+++ b/lib/spack/external/pytest-fallback/py/test.py
diff --git a/lib/spack/external/pytest.py b/lib/spack/external/pytest-fallback/pytest.py
index 6e124db418..6e124db418 100644
--- a/lib/spack/external/pytest.py
+++ b/lib/spack/external/pytest-fallback/pytest.py
diff --git a/lib/spack/external/six.py b/lib/spack/external/six.py
index 6bf4fd3810..4e15675d8b 100644
--- a/lib/spack/external/six.py
+++ b/lib/spack/external/six.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2010-2017 Benjamin Peterson
+# Copyright (c) 2010-2020 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@@ -29,7 +29,7 @@ import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.11.0"
+__version__ = "1.16.0"
# Useful for very coarse version differentiation.
@@ -71,6 +71,11 @@ else:
MAXSIZE = int((1 << 63) - 1)
del X
+if PY34:
+ from importlib.util import spec_from_loader
+else:
+ spec_from_loader = None
+
def _add_doc(func, doc):
"""Add documentation to a function."""
@@ -186,6 +191,11 @@ class _SixMetaPathImporter(object):
return self
return None
+ def find_spec(self, fullname, path, target=None):
+ if fullname in self.known_modules:
+ return spec_from_loader(fullname, self)
+ return None
+
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
@@ -223,6 +233,12 @@ class _SixMetaPathImporter(object):
return None
get_source = get_code # same as get_code
+ def create_module(self, spec):
+ return self.load_module(spec.name)
+
+ def exec_module(self, module):
+ pass
+
_importer = _SixMetaPathImporter(__name__)
@@ -255,9 +271,11 @@ _moved_attributes = [
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
+ MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
- MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+ MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
@@ -637,13 +655,16 @@ if PY3:
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
+ del io
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
+ _assertNotRegex = "assertNotRegex"
else:
def b(s):
return s
@@ -665,6 +686,7 @@ else:
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
@@ -681,6 +703,10 @@ def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
+def assertNotRegex(self, *args, **kwargs):
+ return getattr(self, _assertNotRegex)(*args, **kwargs)
+
+
if PY3:
exec_ = getattr(moves.builtins, "exec")
@@ -716,16 +742,7 @@ else:
""")
-if sys.version_info[:2] == (3, 2):
- exec_("""def raise_from(value, from_value):
- try:
- if from_value is None:
- raise value
- raise value from from_value
- finally:
- value = None
-""")
-elif sys.version_info[:2] > (3, 2):
+if sys.version_info[:2] > (3,):
exec_("""def raise_from(value, from_value):
try:
raise value from from_value
@@ -805,13 +822,33 @@ if sys.version_info[:2] < (3, 3):
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
+ # This does exactly the same what the :func:`py3:functools.update_wrapper`
+ # function does on Python versions after 3.2. It sets the ``__wrapped__``
+ # attribute on ``wrapper`` object and it doesn't raise an error if any of
+ # the attributes mentioned in ``assigned`` and ``updated`` are missing on
+ # ``wrapped`` object.
+ def _update_wrapper(wrapper, wrapped,
+ assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ for attr in assigned:
+ try:
+ value = getattr(wrapped, attr)
+ except AttributeError:
+ continue
+ else:
+ setattr(wrapper, attr, value)
+ for attr in updated:
+ getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
+ wrapper.__wrapped__ = wrapped
+ return wrapper
+ _update_wrapper.__doc__ = functools.update_wrapper.__doc__
+
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
- def wrapper(f):
- f = functools.wraps(wrapped, assigned, updated)(f)
- f.__wrapped__ = wrapped
- return f
- return wrapper
+ return functools.partial(_update_wrapper, wrapped=wrapped,
+ assigned=assigned, updated=updated)
+ wraps.__doc__ = functools.wraps.__doc__
+
else:
wraps = functools.wraps
@@ -824,7 +861,15 @@ def with_metaclass(meta, *bases):
class metaclass(type):
def __new__(cls, name, this_bases, d):
- return meta(name, bases, d)
+ if sys.version_info[:2] >= (3, 7):
+ # This version introduced PEP 560 that requires a bit
+ # of extra care (we mimic what is done by __build_class__).
+ resolved_bases = types.resolve_bases(bases)
+ if resolved_bases is not bases:
+ d['__orig_bases__'] = bases
+ else:
+ resolved_bases = bases
+ return meta(name, resolved_bases, d)
@classmethod
def __prepare__(cls, name, this_bases):
@@ -844,13 +889,75 @@ def add_metaclass(metaclass):
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
+ if hasattr(cls, '__qualname__'):
+ orig_vars['__qualname__'] = cls.__qualname__
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
+def ensure_binary(s, encoding='utf-8', errors='strict'):
+ """Coerce **s** to six.binary_type.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> encoded to `bytes`
+ - `bytes` -> `bytes`
+ """
+ if isinstance(s, binary_type):
+ return s
+ if isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to `str`.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ # Optimization: Fast return for the common case.
+ if type(s) is str:
+ return s
+ if PY2 and isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ elif PY3 and isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
+ return s
+
+
+def ensure_text(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to six.text_type.
+
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
def python_2_unicode_compatible(klass):
"""
- A decorator that defines __unicode__ and __str__ methods under Python 2.
+ A class decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
diff --git a/lib/spack/llnl/util/multiproc.py b/lib/spack/llnl/util/multiproc.py
index c73ebaed20..2a261d143e 100644
--- a/lib/spack/llnl/util/multiproc.py
+++ b/lib/spack/llnl/util/multiproc.py
@@ -16,7 +16,7 @@ __all__ = ['Barrier']
class Barrier:
"""Simple reusable semaphore barrier.
- Python 2.6 doesn't have multiprocessing barriers so we implement this.
+ Python 2 doesn't have multiprocessing barriers so we implement this.
See http://greenteapress.com/semaphores/downey08semaphores.pdf, p. 41.
"""
diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py
index a7a4637ba9..81c779661f 100644
--- a/lib/spack/llnl/util/tty/log.py
+++ b/lib/spack/llnl/util/tty/log.py
@@ -607,7 +607,7 @@ class log_output(object):
self._active = True
# return this log_output object so that the user can do things
- # like temporarily echo some ouptut.
+ # like temporarily echo some output.
return self
def __exit__(self, exc_type, exc_val, exc_tb):
diff --git a/lib/spack/spack/analyzers/libabigail.py b/lib/spack/spack/analyzers/libabigail.py
index 9b26f3ca6f..88802ec28c 100644
--- a/lib/spack/spack/analyzers/libabigail.py
+++ b/lib/spack/spack/analyzers/libabigail.py
@@ -2,8 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-
import os
import llnl.util.tty as tty
@@ -16,6 +14,7 @@ import spack.hooks
import spack.monitor
import spack.package
import spack.repo
+import spack.util.executable
from .analyzer_base import AnalyzerBase
@@ -40,13 +39,12 @@ class Libabigail(AnalyzerBase):
tty.debug("Preparing to use Libabigail, will install if missing.")
with spack.bootstrap.ensure_bootstrap_configuration():
-
# libabigail won't install lib/bin/share without docs
spec = spack.spec.Spec("libabigail+docs")
- spec.concretize()
-
- self.abidw = spack.bootstrap.get_executable(
- "abidw", spec=spec, install=True)
+ spack.bootstrap.ensure_executables_in_path_or_raise(
+ ["abidw"], abstract_spec=spec
+ )
+ self.abidw = spack.util.executable.which('abidw')
def run(self):
"""
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index 0a23896b8f..01817a3abd 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import codecs
+import collections
import hashlib
import json
import os
@@ -12,10 +13,10 @@ import sys
import tarfile
import tempfile
import traceback
+import warnings
from contextlib import closing
import ruamel.yaml as yaml
-from ordereddict_backport import OrderedDict
from six.moves.urllib.error import HTTPError, URLError
import llnl.util.lang
@@ -27,10 +28,13 @@ import spack.config as config
import spack.database as spack_db
import spack.fetch_strategy as fs
import spack.hash_types as ht
+import spack.hooks
import spack.hooks.sbang
import spack.mirror
import spack.platforms
import spack.relocate as relocate
+import spack.repo
+import spack.store
import spack.util.file_cache as file_cache
import spack.util.gpg
import spack.util.spack_json as sjson
@@ -975,8 +979,11 @@ def generate_key_index(key_prefix, tmpdir=None):
shutil.rmtree(tmpdir)
-def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
- allow_root=False, key=None, regenerate_index=False):
+def _build_tarball(
+ spec, outdir,
+ force=False, relative=False, unsigned=False,
+ allow_root=False, key=None, regenerate_index=False
+):
"""
Build a tarball from given spec and put it into the directory structure
used at the mirror (following <tarball_directory_name>).
@@ -1044,11 +1051,11 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
os.remove(temp_tarfile_path)
# create info for later relocation and create tar
- write_buildinfo_file(spec, workdir, rel)
+ write_buildinfo_file(spec, workdir, relative)
# optionally make the paths in the binaries relative to each other
# in the spack install tree before creating tarball
- if rel:
+ if relative:
try:
make_package_relative(workdir, spec, allow_root)
except Exception as e:
@@ -1096,7 +1103,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
buildinfo = {}
buildinfo['relative_prefix'] = os.path.relpath(
spec.prefix, spack.store.layout.root)
- buildinfo['relative_rpaths'] = rel
+ buildinfo['relative_rpaths'] = relative
spec_dict['buildinfo'] = buildinfo
with open(specfile_path, 'w') as outfile:
@@ -1148,6 +1155,64 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
return None
+def nodes_to_be_packaged(specs, include_root=True, include_dependencies=True):
+ """Return the list of nodes to be packaged, given a list of specs.
+
+ Args:
+ specs (List[spack.spec.Spec]): list of root specs to be processed
+ include_root (bool): include the root of each spec in the nodes
+ include_dependencies (bool): include the dependencies of each
+ spec in the nodes
+ """
+ if not include_root and not include_dependencies:
+ return set()
+
+ def skip_node(current_node):
+ if current_node.external or current_node.virtual:
+ return True
+ return spack.store.db.query_one(current_node) is None
+
+ expanded_set = set()
+ for current_spec in specs:
+ if not include_dependencies:
+ nodes = [current_spec]
+ else:
+ nodes = [n for n in current_spec.traverse(
+ order='post', root=include_root, deptype=('link', 'run')
+ )]
+
+ for node in nodes:
+ if not skip_node(node):
+ expanded_set.add(node)
+
+ return expanded_set
+
+
+def push(specs, push_url, specs_kwargs=None, **kwargs):
+ """Create a binary package for each of the specs passed as input and push them
+ to a given push URL.
+
+ Args:
+ specs (List[spack.spec.Spec]): installed specs to be packaged
+ push_url (str): url where to push the binary package
+ specs_kwargs (dict): dictionary with two possible boolean keys, "include_root"
+ and "include_dependencies", which determine which part of each spec is
+ packaged and pushed to the mirror
+ **kwargs: TODO
+
+ """
+ specs_kwargs = specs_kwargs or {'include_root': True, 'include_dependencies': True}
+ nodes = nodes_to_be_packaged(specs, **specs_kwargs)
+
+ # TODO: This seems to be an easy target for task
+ # TODO: distribution using a parallel pool
+ for node in nodes:
+ try:
+ _build_tarball(node, push_url, **kwargs)
+ except NoOverwriteException as e:
+ warnings.warn(str(e))
+
+
def download_tarball(spec, preferred_mirrors=None):
"""
Download binary tarball for given package into stage area, returning
@@ -1278,8 +1343,8 @@ def relocate_package(spec, allow_root):
# Spurious replacements (e.g. sbang) will cause issues with binaries
# For example, the new sbang can be longer than the old one.
# Hence 2 dictionaries are maintained here.
- prefix_to_prefix_text = OrderedDict({})
- prefix_to_prefix_bin = OrderedDict({})
+ prefix_to_prefix_text = collections.OrderedDict()
+ prefix_to_prefix_bin = collections.OrderedDict()
if old_sbang_install_path:
install_path = spack.hooks.sbang.sbang_install_path()
@@ -1486,6 +1551,66 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
os.remove(filename)
+def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None):
+ """Install the root node of a concrete spec from a buildcache.
+
+ Checking the sha256 sum of a node before installation is usually needed only
+ for software installed during Spack's bootstrapping (since we might not have
+ a proper signature verification mechanism available).
+
+ Args:
+ spec: spec to be installed (note that only the root node will be installed)
+ allow_root (bool): allows the root directory to be present in binaries
+ (may affect relocation)
+ unsigned (bool): if True allows installing unsigned binaries
+ force (bool): force installation if the spec is already present in the
+ local store
+ sha256 (str): optional sha256 of the binary package, to be checked
+ before installation
+ """
+ package = spack.repo.get(spec)
+ # Early termination
+ if spec.external or spec.virtual:
+ warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
+ return
+ elif spec.concrete and package.installed and not force:
+ warnings.warn("Package for spec {0} already installed.".format(spec.format()))
+ return
+
+ tarball = download_tarball(spec)
+ if not tarball:
+ msg = 'download of binary cache file for spec "{0}" failed'
+ raise RuntimeError(msg.format(spec.format()))
+
+ if sha256:
+ checker = spack.util.crypto.Checker(sha256)
+ msg = 'cannot verify checksum for "{0}" [expected={1}]'
+ msg = msg.format(tarball, sha256)
+ if not checker.check(tarball):
+ raise spack.binary_distribution.NoChecksumException(msg)
+ tty.debug('Verified SHA256 checksum of the build cache')
+
+ tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
+ extract_tarball(spec, tarball, allow_root, unsigned, force)
+ spack.hooks.post_install(spec)
+ spack.store.db.add(spec, spack.store.layout)
+
+
+def install_single_spec(spec, allow_root=False, unsigned=False, force=False):
+ """Install a single concrete spec from a buildcache.
+
+ Args:
+ spec (spack.spec.Spec): spec to be installed
+ allow_root (bool): allows the root directory to be present in binaries
+ (may affect relocation)
+ unsigned (bool): if True allows installing unsigned binaries
+ force (bool): force installation if the spec is already present in the
+ local store
+ """
+ for node in spec.traverse(root=True, order='post', deptype=('link', 'run')):
+ install_root_node(node, allow_root=allow_root, unsigned=unsigned, force=force)
+
+
def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
"""
Try to find the spec directly on the configured mirrors
@@ -1937,3 +2062,73 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
continue
return False
+
+
+def download_single_spec(
+ concrete_spec, destination, require_cdashid=False, mirror_url=None
+):
+ """Download the buildcache files for a single concrete spec.
+
+ Args:
+ concrete_spec: concrete spec to be downloaded
+ destination (str): path where to put the downloaded buildcache
+ require_cdashid (bool): if False the `.cdashid` file is optional
+ mirror_url (str): url of the mirror from which to download
+ """
+ tarfile_name = tarball_name(concrete_spec, '.spack')
+ tarball_dir_name = tarball_directory_name(concrete_spec)
+ tarball_path_name = os.path.join(tarball_dir_name, tarfile_name)
+ local_tarball_path = os.path.join(destination, tarball_dir_name)
+
+ files_to_fetch = [
+ {
+ 'url': [tarball_path_name],
+ 'path': local_tarball_path,
+ 'required': True,
+ }, {
+ 'url': [tarball_name(concrete_spec, '.spec.json'),
+ tarball_name(concrete_spec, '.spec.yaml')],
+ 'path': destination,
+ 'required': True,
+ }, {
+ 'url': [tarball_name(concrete_spec, '.cdashid')],
+ 'path': destination,
+ 'required': require_cdashid,
+ },
+ ]
+
+ return download_buildcache_entry(files_to_fetch, mirror_url)
+
+
+class BinaryCacheQuery(object):
+ """Callable object to query if a spec is in a binary cache"""
+ def __init__(self, all_architectures):
+ """
+ Args:
+ all_architectures (bool): if True consider all the spec for querying,
+ otherwise restrict to the current default architecture
+ """
+ self.all_architectures = all_architectures
+
+ specs = update_cache_and_get_specs()
+
+ if not self.all_architectures:
+ arch = spack.spec.Spec.default_arch()
+ specs = [s for s in specs if s.satisfies(arch)]
+
+ self.possible_specs = specs
+
+ def __call__(self, spec, **kwargs):
+ matches = []
+ if spec.startswith('/'):
+ # Matching a DAG hash
+ query_hash = spec.replace('/', '')
+ for candidate_spec in self.possible_specs:
+ if candidate_spec.dag_hash().startswith(query_hash):
+ matches.append(candidate_spec)
+ else:
+ # Matching a spec constraint
+ matches = [
+ s for s in self.possible_specs if s.satisfies(spec)
+ ]
+ return matches
diff --git a/lib/spack/spack/bootstrap.py b/lib/spack/spack/bootstrap.py
index 52fadbf700..3cb649789d 100644
--- a/lib/spack/spack/bootstrap.py
+++ b/lib/spack/spack/bootstrap.py
@@ -10,14 +10,12 @@ import functools
import json
import os
import os.path
+import platform
import re
import sys
+import sysconfig
-try:
- import sysconfig # novm
-except ImportError:
- # Not supported on Python 2.6
- pass
+import six
import archspec.cpu
@@ -28,7 +26,6 @@ import spack.binary_distribution
import spack.config
import spack.detection
import spack.environment
-import spack.main
import spack.modules
import spack.paths
import spack.platforms
@@ -38,10 +35,6 @@ import spack.store
import spack.user_environment
import spack.util.executable
import spack.util.path
-from spack.util.environment import EnvironmentModifications
-
-#: "spack buildcache" command, initialized lazily
-_buildcache_cmd = None
#: Map a bootstrapper type to the corresponding class
_bootstrap_methods = {}
@@ -60,29 +53,39 @@ def _bootstrapper(type):
return _register
-def _try_import_from_store(module, abstract_spec_str):
+def _try_import_from_store(module, query_spec, query_info=None):
"""Return True if the module can be imported from an already
installed spec, False otherwise.
Args:
module: Python module to be imported
- abstract_spec_str: abstract spec that may provide the module
+ query_spec: spec that may provide the module
+ query_info (dict or None): if a dict is passed it is populated with the
+ command found and the concrete spec providing it
"""
- bincache_platform = spack.platforms.real_host()
- if str(bincache_platform) == 'cray':
- bincache_platform = spack.platforms.linux.Linux()
- with spack.platforms.use_platform(bincache_platform):
- abstract_spec_str = str(spack.spec.Spec(abstract_spec_str))
+ # If it is a string assume it's one of the root specs by this module
+ if isinstance(query_spec, six.string_types):
+ bincache_platform = spack.platforms.real_host()
+ if str(bincache_platform) == 'cray':
+ bincache_platform = spack.platforms.linux.Linux()
+ with spack.platforms.use_platform(bincache_platform):
+ query_spec = str(spack.spec.Spec(query_spec))
- # We have to run as part of this python interpreter
- abstract_spec_str += ' ^' + spec_for_current_python()
+ # We have to run as part of this python interpreter
+ query_spec += ' ^' + spec_for_current_python()
- installed_specs = spack.store.db.query(abstract_spec_str, installed=True)
+ installed_specs = spack.store.db.query(query_spec, installed=True)
for candidate_spec in installed_specs:
- lib_spd = candidate_spec['python'].package.default_site_packages_dir
+ python_spec = candidate_spec['python']
+ lib_spd = python_spec.package.default_site_packages_dir
lib64_spd = lib_spd.replace('lib/', 'lib64/')
+ lib_debian_derivative = os.path.join(
+ 'lib', 'python{0}'.format(python_spec.version.up_to(1)), 'dist-packages'
+ )
+
module_paths = [
+ os.path.join(candidate_spec.prefix, lib_debian_derivative),
os.path.join(candidate_spec.prefix, lib_spd),
os.path.join(candidate_spec.prefix, lib64_spd)
]
@@ -93,9 +96,11 @@ def _try_import_from_store(module, abstract_spec_str):
if _python_import(module):
msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
'provides the "{0}" Python module').format(
- module, abstract_spec_str, candidate_spec.dag_hash()
+ module, query_spec, candidate_spec.dag_hash()
)
tty.debug(msg)
+ if query_info is not None:
+ query_info['spec'] = candidate_spec
return True
except Exception as e:
msg = ('unexpected error while trying to import module '
@@ -105,7 +110,7 @@ def _try_import_from_store(module, abstract_spec_str):
msg = "Spec {0} did not provide module {1}"
tty.warn(msg.format(candidate_spec, module))
- sys.path = sys.path[:-2]
+ sys.path = sys.path[:-3]
return False
@@ -175,7 +180,7 @@ def _fix_ext_suffix(candidate_spec):
os.symlink(abs_path, link_name)
-def _executables_in_store(executables, abstract_spec_str):
+def _executables_in_store(executables, query_spec, query_info=None):
"""Return True if at least one of the executables can be retrieved from
a spec in store, False otherwise.
@@ -185,12 +190,14 @@ def _executables_in_store(executables, abstract_spec_str):
Args:
executables: list of executables to be searched
- abstract_spec_str: abstract spec that may provide the executable
+ query_spec: spec that may provide the executable
+ query_info (dict or None): if a dict is passed it is populated with the
+ command found and the concrete spec providing it
"""
executables_str = ', '.join(executables)
msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'"
- tty.debug(msg.format(executables_str, abstract_spec_str))
- installed_specs = spack.store.db.query(abstract_spec_str, installed=True)
+ tty.debug(msg.format(executables_str, query_spec))
+ installed_specs = spack.store.db.query(query_spec, installed=True)
if installed_specs:
for concrete_spec in installed_specs:
bin_dir = concrete_spec.prefix.bin
@@ -199,6 +206,11 @@ def _executables_in_store(executables, abstract_spec_str):
if (os.path.exists(bin_dir) and os.path.isdir(bin_dir) and
spack.util.executable.which_string(*executables, path=bin_dir)):
spack.util.environment.path_put_first('PATH', [bin_dir])
+ if query_info is not None:
+ query_info['command'] = spack.util.executable.which(
+ *executables, path=bin_dir
+ )
+ query_info['spec'] = concrete_spec
return True
return False
@@ -209,6 +221,7 @@ class _BuildcacheBootstrapper(object):
def __init__(self, conf):
self.name = conf['name']
self.url = conf['info']['url']
+ self.last_search = None
@staticmethod
def _spec_and_platform(abstract_spec_str):
@@ -242,11 +255,6 @@ class _BuildcacheBootstrapper(object):
return data
def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform):
- global _buildcache_cmd
-
- if _buildcache_cmd is None:
- _buildcache_cmd = spack.main.SpackCommand('buildcache')
-
index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
# Reconstruct the compiler that we need to use for bootstrapping
compiler_entry = {
@@ -266,13 +274,18 @@ class _BuildcacheBootstrapper(object):
'compilers', [{'compiler': compiler_entry}]
):
spec_str = '/' + pkg_hash
- install_args = [
- 'install',
- '--sha256', pkg_sha256,
- '--only-root',
- '-a', '-u', '-o', '-f', spec_str
- ]
- _buildcache_cmd(*install_args, fail_on_error=False)
+ query = spack.binary_distribution.BinaryCacheQuery(
+ all_architectures=True
+ )
+ matches = spack.store.find([spec_str], multiple=False, query_fn=query)
+ for match in matches:
+ spack.binary_distribution.install_root_node(
+ match,
+ allow_root=True,
+ unsigned=True,
+ force=True,
+ sha256=pkg_sha256
+ )
def _install_and_test(
self, abstract_spec, bincache_platform, bincache_data, test_fn
@@ -304,7 +317,9 @@ class _BuildcacheBootstrapper(object):
pkg_hash, pkg_sha256, index, bincache_platform
)
- if test_fn():
+ info = {}
+ if test_fn(query_spec=abstract_spec, query_info=info):
+ self.last_search = info
return True
return False
@@ -315,8 +330,8 @@ class _BuildcacheBootstrapper(object):
)
def try_import(self, module, abstract_spec_str):
- test_fn = functools.partial(_try_import_from_store, module, abstract_spec_str)
- if test_fn():
+ test_fn, info = functools.partial(_try_import_from_store, module), {}
+ if test_fn(query_spec=abstract_spec_str, query_info=info):
return True
tty.info("Bootstrapping {0} from pre-built binaries".format(module))
@@ -329,15 +344,12 @@ class _BuildcacheBootstrapper(object):
)
def try_search_path(self, executables, abstract_spec_str):
- test_fn = functools.partial(
- _executables_in_store, executables, abstract_spec_str
- )
- if test_fn():
+ test_fn, info = functools.partial(_executables_in_store, executables), {}
+ if test_fn(query_spec=abstract_spec_str, query_info=info):
+ self.last_search = info
return True
- abstract_spec, bincache_platform = self._spec_and_platform(
- abstract_spec_str
- )
+ abstract_spec, bincache_platform = self._spec_and_platform(abstract_spec_str)
tty.info("Bootstrapping {0} from pre-built binaries".format(abstract_spec.name))
data = self._read_metadata(abstract_spec.name)
return self._install_and_test(
@@ -350,10 +362,12 @@ class _SourceBootstrapper(object):
"""Install the software needed during bootstrapping from sources."""
def __init__(self, conf):
self.conf = conf
+ self.last_search = None
- @staticmethod
- def try_import(module, abstract_spec_str):
- if _try_import_from_store(module, abstract_spec_str):
+ def try_import(self, module, abstract_spec_str):
+ info = {}
+ if _try_import_from_store(module, abstract_spec_str, query_info=info):
+ self.last_search = info
return True
tty.info("Bootstrapping {0} from sources".format(module))
@@ -384,10 +398,15 @@ class _SourceBootstrapper(object):
# Install the spec that should make the module importable
concrete_spec.package.do_install(fail_fast=True)
- return _try_import_from_store(module, abstract_spec_str=abstract_spec_str)
+ if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
+ self.last_search = info
+ return True
+ return False
def try_search_path(self, executables, abstract_spec_str):
- if _executables_in_store(executables, abstract_spec_str):
+ info = {}
+ if _executables_in_store(executables, abstract_spec_str, query_info=info):
+ self.last_search = info
return True
# If we compile code from sources detecting a few build tools
@@ -399,12 +418,18 @@ class _SourceBootstrapper(object):
abstract_spec_str += ' os=fe'
concrete_spec = spack.spec.Spec(abstract_spec_str)
- concrete_spec.concretize()
+ if concrete_spec.name == 'patchelf':
+ concrete_spec._old_concretize(deprecation_warning=False)
+ else:
+ concrete_spec.concretize()
- msg = "[BOOTSTRAP GnuPG] Try installing '{0}' from sources"
+ msg = "[BOOTSTRAP] Try installing '{0}' from sources"
tty.debug(msg.format(abstract_spec_str))
concrete_spec.package.do_install()
- return _executables_in_store(executables, abstract_spec_str)
+ if _executables_in_store(executables, concrete_spec, query_info=info):
+ self.last_search = info
+ return True
+ return False
def _make_bootstrapper(conf):
@@ -527,9 +552,13 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
Raises:
RuntimeError: if the executables cannot be ensured to be in PATH
+
+ Return:
+ Executable object
"""
- if spack.util.executable.which_string(*executables):
- return
+ cmd = spack.util.executable.which(*executables)
+ if cmd:
+ return cmd
executables_str = ', '.join(executables)
source_configs = spack.config.get('bootstrap:sources', [])
@@ -543,7 +572,17 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
b = _make_bootstrapper(current_config)
try:
if b.try_search_path(executables, abstract_spec):
- return
+ # Additional environment variables needed
+ concrete_spec, cmd = b.last_search['spec'], b.last_search['command']
+ env_mods = spack.util.environment.EnvironmentModifications()
+ for dep in concrete_spec.traverse(
+ root=True, order='post', deptype=('link', 'run')
+ ):
+ env_mods.extend(
+ spack.user_environment.environment_modifications_for_spec(dep)
+ )
+ cmd.add_default_envmod(env_mods)
+ return cmd
except Exception as e:
msg = '[BOOTSTRAP EXECUTABLES {0}] Unexpected error "{1}"'
tty.debug(msg.format(executables_str, str(e)))
@@ -563,75 +602,6 @@ def _python_import(module):
return True
-def get_executable(exe, spec=None, install=False):
- """Find an executable named exe, either in PATH or in Spack
-
- Args:
- exe (str): needed executable name
- spec (spack.spec.Spec or str): spec to search for exe in (default exe)
- install (bool): install spec if not available
-
- When ``install`` is True, Spack will use the python used to run Spack as an
- external. The ``install`` option should only be used with packages that
- install quickly (when using external python) or are guaranteed by Spack
- organization to be in a binary mirror (clingo).
- """
- # Search the system first
- runner = spack.util.executable.which(exe)
- if runner:
- return runner
-
- # Check whether it's already installed
- spec = spack.spec.Spec(spec or exe)
- installed_specs = spack.store.db.query(spec, installed=True)
- for ispec in installed_specs:
- # filter out directories of the same name as the executable
- exe_path = [exe_p for exe_p in fs.find(ispec.prefix, exe)
- if fs.is_exe(exe_p)]
- if exe_path:
- ret = spack.util.executable.Executable(exe_path[0])
- envmod = EnvironmentModifications()
- for dep in ispec.traverse(root=True, order='post'):
- envmod.extend(
- spack.user_environment.environment_modifications_for_spec(dep)
- )
- ret.add_default_envmod(envmod)
- return ret
- else:
- tty.warn('Exe %s not found in prefix %s' % (exe, ispec.prefix))
-
- def _raise_error(executable, exe_spec):
- error_msg = 'cannot find the executable "{0}"'.format(executable)
- if exe_spec:
- error_msg += ' from spec "{0}'.format(exe_spec)
- raise RuntimeError(error_msg)
-
- # If we're not allowed to install this for ourselves, we can't find it
- if not install:
- _raise_error(exe, spec)
-
- with spack_python_interpreter():
- # We will install for ourselves, using this python if needed
- # Concretize the spec
- spec.concretize()
-
- spec.package.do_install()
- # filter out directories of the same name as the executable
- exe_path = [exe_p for exe_p in fs.find(spec.prefix, exe)
- if fs.is_exe(exe_p)]
- if exe_path:
- ret = spack.util.executable.Executable(exe_path[0])
- envmod = EnvironmentModifications()
- for dep in spec.traverse(root=True, order='post'):
- envmod.extend(
- spack.user_environment.environment_modifications_for_spec(dep)
- )
- ret.add_default_envmod(envmod)
- return ret
-
- _raise_error(exe, spec)
-
-
def _bootstrap_config_scopes():
tty.debug('[BOOTSTRAP CONFIG SCOPE] name=_builtin')
config_scopes = [
@@ -674,8 +644,30 @@ def _add_externals_if_missing():
spack.detection.update_configuration(detected_packages, scope='bootstrap')
+#: Reference counter for the bootstrapping configuration context manager
+_REF_COUNT = 0
+
+
@contextlib.contextmanager
def ensure_bootstrap_configuration():
+ # The context manager is reference counted to ensure we don't swap multiple
+ # times if there's nested use of it in the stack. One compelling use case
+ # is bootstrapping patchelf during the bootstrap of clingo.
+ global _REF_COUNT
+ already_swapped = bool(_REF_COUNT)
+ _REF_COUNT += 1
+ try:
+ if already_swapped:
+ yield
+ else:
+ with _ensure_bootstrap_configuration():
+ yield
+ finally:
+ _REF_COUNT -= 1
+
+
+@contextlib.contextmanager
+def _ensure_bootstrap_configuration():
bootstrap_store_path = store_path()
user_configuration = _read_and_sanitize_configuration()
with spack.environment.no_active_environment():
@@ -783,6 +775,205 @@ def gnupg_root_spec():
def ensure_gpg_in_path_or_raise():
"""Ensure gpg or gpg2 are in the PATH or raise."""
- ensure_executables_in_path_or_raise(
- executables=['gpg2', 'gpg'], abstract_spec=gnupg_root_spec(),
+ return ensure_executables_in_path_or_raise(
+ executables=['gpg2', 'gpg'], abstract_spec=gnupg_root_spec()
)
+
+
+def patchelf_root_spec():
+ """Return the root spec used to bootstrap patchelf"""
+ # TODO: patchelf is restricted to v0.13 since earlier versions have
+    # TODO: bugs that we don't want to deal with, while v0.14 requires a
+    # TODO: C++17 capable compiler which may not be available on all platforms.
+ return _root_spec('patchelf@0.13.1:0.13.99')
+
+
+def ensure_patchelf_in_path_or_raise():
+ """Ensure patchelf is in the PATH or raise."""
+ return ensure_executables_in_path_or_raise(
+ executables=['patchelf'], abstract_spec=patchelf_root_spec()
+ )
+
+
+###
+# Development dependencies
+###
+
+
+def isort_root_spec():
+ return _root_spec('py-isort@4.3.5:')
+
+
+def ensure_isort_in_path_or_raise():
+ """Ensure that isort is in the PATH or raise."""
+ executable, root_spec = 'isort', isort_root_spec()
+ return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)
+
+
+def mypy_root_spec():
+ return _root_spec('py-mypy@0.900:')
+
+
+def ensure_mypy_in_path_or_raise():
+ """Ensure that mypy is in the PATH or raise."""
+ executable, root_spec = 'mypy', mypy_root_spec()
+ return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)
+
+
+def black_root_spec():
+ return _root_spec('py-black')
+
+
+def ensure_black_in_path_or_raise():
+    """Ensure that black is in the PATH or raise."""
+ executable, root_spec = 'black', black_root_spec()
+ return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)
+
+
+def flake8_root_spec():
+ return _root_spec('py-flake8')
+
+
+def ensure_flake8_in_path_or_raise():
+ """Ensure that flake8 is in the PATH or raise."""
+ executable, root_spec = 'flake8', flake8_root_spec()
+ return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)
+
+
+def _missing(name, purpose, system_only=True):
+ """Message to be printed if an executable is not found"""
+ msg = '[{2}] MISSING "{0}": {1}'
+ if not system_only:
+ return msg.format(name, purpose, '@*y{{B}}')
+ return msg.format(name, purpose, '@*y{{-}}')
+
+
+def _required_system_executable(exes, msg):
+    """Search for an executable in the system path only."""
+ if isinstance(exes, six.string_types):
+ exes = (exes,)
+ if spack.util.executable.which_string(*exes):
+ return True, None
+ return False, msg
+
+
+def _required_python_module(module, query_spec, msg):
+ """Check if a Python module is available in the current interpreter or
+ if it can be loaded from the bootstrap store
+ """
+ if _python_import(module) or _try_import_from_store(module, query_spec):
+ return True, None
+ return False, msg
+
+
+def _required_executable(exes, query_spec, msg):
+ """Search for an executable in the system path or in the bootstrap store."""
+ if isinstance(exes, six.string_types):
+ exes = (exes,)
+ if (spack.util.executable.which_string(*exes) or
+ _executables_in_store(exes, query_spec)):
+ return True, None
+ return False, msg
+
+
+def _core_requirements():
+ _core_system_exes = {
+ 'make': _missing('make', 'required to build software from sources'),
+ 'patch': _missing('patch', 'required to patch source code before building'),
+ 'bash': _missing('bash', 'required for Spack compiler wrapper'),
+ 'tar': _missing('tar', 'required to manage code archives'),
+ 'gzip': _missing('gzip', 'required to compress/decompress code archives'),
+ 'unzip': _missing('unzip', 'required to compress/decompress code archives'),
+ 'bzip2': _missing('bzip2', 'required to compress/decompress code archives'),
+ 'git': _missing('git', 'required to fetch/manage git repositories')
+ }
+ if platform.system().lower() == 'linux':
+ _core_system_exes['xz'] = _missing(
+ 'xz', 'required to compress/decompress code archives'
+ )
+
+ # Executables that are not bootstrapped yet
+ result = [_required_system_executable(exe, msg)
+ for exe, msg in _core_system_exes.items()]
+ # Python modules
+ result.append(_required_python_module(
+ 'clingo', clingo_root_spec(),
+ _missing('clingo', 'required to concretize specs', False)
+ ))
+ return result
+
+
+def _buildcache_requirements():
+ _buildcache_exes = {
+ 'file': _missing('file', 'required to analyze files for buildcaches'),
+ ('gpg2', 'gpg'): _missing('gpg2', 'required to sign/verify buildcaches', False)
+ }
+ if platform.system().lower() == 'darwin':
+ _buildcache_exes['otool'] = _missing('otool', 'required to relocate binaries')
+
+ # Executables that are not bootstrapped yet
+ result = [_required_system_executable(exe, msg)
+ for exe, msg in _buildcache_exes.items()]
+
+ if platform.system().lower() == 'linux':
+ result.append(_required_executable(
+ 'patchelf', patchelf_root_spec(),
+ _missing('patchelf', 'required to relocate binaries', False)
+ ))
+
+ return result
+
+
+def _optional_requirements():
+ _optional_exes = {
+ 'zstd': _missing('zstd', 'required to compress/decompress code archives'),
+ 'svn': _missing('svn', 'required to manage subversion repositories'),
+ 'hg': _missing('hg', 'required to manage mercurial repositories')
+ }
+ # Executables that are not bootstrapped yet
+ result = [_required_system_executable(exe, msg)
+ for exe, msg in _optional_exes.items()]
+ return result
+
+
+def _development_requirements():
+ return [
+ _required_executable('isort', isort_root_spec(),
+ _missing('isort', 'required for style checks', False)),
+ _required_executable('mypy', mypy_root_spec(),
+ _missing('mypy', 'required for style checks', False)),
+ _required_executable('flake8', flake8_root_spec(),
+ _missing('flake8', 'required for style checks', False)),
+ _required_executable('black', black_root_spec(),
+ _missing('black', 'required for code formatting', False))
+ ]
+
+
+def status_message(section):
+ """Return a status message to be printed to screen that refers to the
+ section passed as argument and a bool which is True if there are missing
+ dependencies.
+
+ Args:
+ section (str): either 'core' or 'buildcache' or 'optional' or 'develop'
+ """
+ pass_token, fail_token = '@*g{[PASS]}', '@*r{[FAIL]}'
+
+ # Contain the header of the section and a list of requirements
+ spack_sections = {
+ 'core': ("{0} @*{{Core Functionalities}}", _core_requirements),
+ 'buildcache': ("{0} @*{{Binary packages}}", _buildcache_requirements),
+ 'optional': ("{0} @*{{Optional Features}}", _optional_requirements),
+ 'develop': ("{0} @*{{Development Dependencies}}", _development_requirements)
+ }
+ msg, required_software = spack_sections[section]
+
+ with ensure_bootstrap_configuration():
+ missing_software = False
+ for found, err_msg in required_software():
+ if not found:
+ missing_software = True
+ msg += "\n " + err_msg
+ msg += '\n'
+ msg = msg.format(pass_token if not missing_software else fail_token)
+ return msg, missing_software
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 83aa634276..fa1ad76274 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -185,6 +185,13 @@ def clean_environment():
env.unset('LD_PRELOAD')
env.unset('DYLD_INSERT_LIBRARIES')
+ # Avoid <packagename>_ROOT user variables overriding spack dependencies
+ # https://cmake.org/cmake/help/latest/variable/PackageName_ROOT.html
+ # Spack needs SPACK_ROOT though, so we need to exclude that
+ for varname in os.environ.keys():
+ if varname.endswith('_ROOT') and varname != 'SPACK_ROOT':
+ env.unset(varname)
+
# On Cray "cluster" systems, unset CRAY_LD_LIBRARY_PATH to avoid
# interference with Spack dependencies.
# CNL requires these variables to be set (or at least some of them,
diff --git a/lib/spack/spack/build_systems/autotools.py b/lib/spack/spack/build_systems/autotools.py
index e0c8cf2e03..9b2de555bc 100644
--- a/lib/spack/spack/build_systems/autotools.py
+++ b/lib/spack/spack/build_systems/autotools.py
@@ -498,6 +498,9 @@ To resolve this problem, please try the following:
for ``<spec-name> foo=x +bar``
+ Note: returns an empty list when the variant is conditional and its condition
+ is not met.
+
Returns:
list: list of strings that corresponds to the activation/deactivation
of the variant that has been processed
@@ -519,6 +522,9 @@ To resolve this problem, please try the following:
msg = '"{0}" is not a variant of "{1}"'
raise KeyError(msg.format(variant, self.name))
+ if variant not in spec.variants:
+ return []
+
# Create a list of pairs. Each pair includes a configuration
# option and whether or not that option is activated
variant_desc, _ = self.variants[variant]
diff --git a/lib/spack/spack/build_systems/cmake.py b/lib/spack/spack/build_systems/cmake.py
index bf431e139d..e7ea30c6a2 100644
--- a/lib/spack/spack/build_systems/cmake.py
+++ b/lib/spack/spack/build_systems/cmake.py
@@ -267,6 +267,10 @@ class CMakePackage(PackageBase):
"-DSWR:STRING=avx;avx2]
for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
+
+ Note: if the provided variant is conditional, and the condition is not met,
+ this function returns an empty string. CMake discards empty strings
+ provided on the command line.
"""
if variant is None:
@@ -276,6 +280,9 @@ class CMakePackage(PackageBase):
raise KeyError(
'"{0}" is not a variant of "{1}"'.format(variant, self.name))
+ if variant not in self.spec.variants:
+ return ''
+
value = self.spec.variants[variant].value
if isinstance(value, (tuple, list)):
# Sort multi-valued variants for reproducibility
diff --git a/lib/spack/spack/build_systems/cuda.py b/lib/spack/spack/build_systems/cuda.py
index 634cfad637..79f57b046e 100644
--- a/lib/spack/spack/build_systems/cuda.py
+++ b/lib/spack/spack/build_systems/cuda.py
@@ -35,7 +35,8 @@ class CudaPackage(PackageBase):
variant('cuda_arch',
description='CUDA architecture',
- values=spack.variant.any_combination_of(*cuda_arch_values))
+ values=spack.variant.any_combination_of(*cuda_arch_values),
+ when='+cuda')
# https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#nvcc-examples
# https://llvm.org/docs/CompileCudaWithLLVM.html#compiling-cuda-code
diff --git a/lib/spack/spack/build_systems/intel.py b/lib/spack/spack/build_systems/intel.py
index 2c6732c19a..9968bc2ec6 100644
--- a/lib/spack/spack/build_systems/intel.py
+++ b/lib/spack/spack/build_systems/intel.py
@@ -690,6 +690,12 @@ class IntelPackage(PackageBase):
'--print-file-name', 'libgomp.%s' % dso_suffix, output=str)
omp_libs = LibraryList(omp_lib_path.strip())
+ elif '%clang' in self.spec:
+ clang = Executable(self.compiler.cc)
+ omp_lib_path = clang(
+ '--print-file-name', 'libomp.%s' % dso_suffix, output=str)
+ omp_libs = LibraryList(omp_lib_path.strip())
+
if len(omp_libs) < 1:
raise_lib_error('Cannot locate OpenMP libraries:', omp_libnames)
@@ -772,7 +778,7 @@ class IntelPackage(PackageBase):
if self.spec.satisfies('threads=openmp'):
if '%intel' in self.spec:
mkl_threading = 'libmkl_intel_thread'
- elif '%gcc' in self.spec:
+ elif '%gcc' in self.spec or '%clang' in self.spec:
mkl_threading = 'libmkl_gnu_thread'
threading_engine_libs = self.openmp_libs
elif self.spec.satisfies('threads=tbb'):
@@ -994,6 +1000,16 @@ class IntelPackage(PackageBase):
libnames,
root=self.component_lib_dir('mpi'),
shared=True, recursive=True) + result
+ # Intel MPI since 2019 depends on libfabric which is not in the
+ # lib directory but in a directory of its own which should be
+ # included in the rpath
+ if self.version_yearlike >= ver('2019'):
+ d = ancestor(self.component_lib_dir('mpi'))
+ if '+external-libfabric' in self.spec:
+ result += self.spec['libfabric'].libs
+ else:
+ result += find_libraries(['libfabric'],
+ os.path.join(d, 'libfabric', 'lib'))
if '^mpi' in self.spec.root and ('+mkl' in self.spec or
self.provides('scalapack')):
@@ -1091,15 +1107,6 @@ class IntelPackage(PackageBase):
# which performs dizzyingly similar but necessarily different
# actions, and (b) function code leaves a bit more breathing
# room within the suffocating corset of flake8 line length.
-
- # Intel MPI since 2019 depends on libfabric which is not in the
- # lib directory but in a directory of its own which should be
- # included in the rpath
- if self.version_yearlike >= ver('2019'):
- d = ancestor(self.component_lib_dir('mpi'))
- libfabrics_path = os.path.join(d, 'libfabric', 'lib')
- env.append_path('SPACK_COMPILER_EXTRA_RPATHS',
- libfabrics_path)
else:
raise InstallError('compilers_of_client arg required for MPI')
diff --git a/lib/spack/spack/build_systems/oneapi.py b/lib/spack/spack/build_systems/oneapi.py
index 0ff77d729b..e50e921915 100644
--- a/lib/spack/spack/build_systems/oneapi.py
+++ b/lib/spack/spack/build_systems/oneapi.py
@@ -99,7 +99,13 @@ class IntelOneApiPackage(Package):
class IntelOneApiLibraryPackage(IntelOneApiPackage):
- """Base class for Intel oneAPI library packages."""
+ """Base class for Intel oneAPI library packages.
+
+ Contains some convenient default implementations for libraries.
+ Implement the method directly in the package if something
+ different is needed.
+
+ """
@property
def headers(self):
@@ -111,3 +117,36 @@ class IntelOneApiLibraryPackage(IntelOneApiPackage):
lib_path = join_path(self.component_path, 'lib', 'intel64')
lib_path = lib_path if isdir(lib_path) else dirname(lib_path)
return find_libraries('*', root=lib_path, shared=True, recursive=True)
+
+
+class IntelOneApiStaticLibraryList(object):
+ """Provides ld_flags when static linking is needed
+
+ Oneapi puts static and dynamic libraries in the same directory, so
+ -l will default to finding the dynamic library. Use absolute
+ paths, as recommended by oneapi documentation.
+
+ Allow both static and dynamic libraries to be supplied by the
+ package.
+ """
+
+ def __init__(self, static_libs, dynamic_libs):
+ self.static_libs = static_libs
+ self.dynamic_libs = dynamic_libs
+
+ @property
+ def directories(self):
+ return self.dynamic_libs.directories
+
+ @property
+ def search_flags(self):
+ return self.dynamic_libs.search_flags
+
+ @property
+ def link_flags(self):
+ return '-Wl,--start-group {0} -Wl,--end-group {1}'.format(
+ ' '.join(self.static_libs.libraries), self.dynamic_libs.link_flags)
+
+ @property
+ def ld_flags(self):
+ return '{0} {1}'.format(self.search_flags, self.link_flags)
diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py
index a308e77cb9..2d003f38e3 100644
--- a/lib/spack/spack/build_systems/python.py
+++ b/lib/spack/spack/build_systems/python.py
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import os
+import re
import shutil
import llnl.util.tty as tty
@@ -144,6 +145,8 @@ class PythonPackage(PackageBase):
modules.append(path.replace(root + os.sep, '', 1).replace(
'.py', '').replace('/', '.'))
+ modules = [mod for mod in modules if re.match('[a-zA-Z0-9._]+$', mod)]
+
tty.debug('Detected the following modules: {0}'.format(modules))
return modules
diff --git a/lib/spack/spack/build_systems/rocm.py b/lib/spack/spack/build_systems/rocm.py
index 977db700db..e2490aaa6e 100644
--- a/lib/spack/spack/build_systems/rocm.py
+++ b/lib/spack/spack/build_systems/rocm.py
@@ -91,7 +91,7 @@ class ROCmPackage(PackageBase):
# Possible architectures
amdgpu_targets = (
'gfx701', 'gfx801', 'gfx802', 'gfx803',
- 'gfx900', 'gfx906', 'gfx908', 'gfx1010',
+ 'gfx900', 'gfx906', 'gfx908', 'gfx90a', 'gfx1010',
'gfx1011', 'gfx1012'
)
@@ -100,7 +100,8 @@ class ROCmPackage(PackageBase):
# possible amd gpu targets for rocm builds
variant('amdgpu_target',
description='AMD GPU architecture',
- values=spack.variant.any_combination_of(*amdgpu_targets))
+ values=spack.variant.any_combination_of(*amdgpu_targets),
+ when='+rocm')
depends_on('llvm-amdgpu', when='+rocm')
depends_on('hsa-rocr-dev', when='+rocm')
diff --git a/lib/spack/spack/build_systems/sip.py b/lib/spack/spack/build_systems/sip.py
index 814aa2605b..49fdd621ee 100644
--- a/lib/spack/spack/build_systems/sip.py
+++ b/lib/spack/spack/build_systems/sip.py
@@ -5,6 +5,7 @@
import inspect
import os
+import re
import llnl.util.tty as tty
from llnl.util.filesystem import find, join_path, working_dir
@@ -81,6 +82,8 @@ class SIPPackage(PackageBase):
modules.append(path.replace(root + os.sep, '', 1).replace(
'.py', '').replace('/', '.'))
+ modules = [mod for mod in modules if re.match('[a-zA-Z0-9._]+$', mod)]
+
tty.debug('Detected the following modules: {0}'.format(modules))
return modules
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index 8d337196ce..f23445f1df 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -1271,6 +1271,7 @@ def get_concrete_specs(env, root_spec, job_name, related_builds,
def register_cdash_build(build_name, base_url, project, site, track):
url = base_url + '/api/v1/addBuild.php'
time_stamp = datetime.datetime.now().strftime('%Y%m%d-%H%M')
+ build_id = None
build_stamp = '{0}-{1}'.format(time_stamp, track)
payload = {
"project": project,
@@ -1292,17 +1293,20 @@ def register_cdash_build(build_name, base_url, project, site, track):
request = Request(url, data=enc_data, headers=headers)
- response = opener.open(request)
- response_code = response.getcode()
+ try:
+ response = opener.open(request)
+ response_code = response.getcode()
- if response_code != 200 and response_code != 201:
- msg = 'Adding build failed (response code = {0}'.format(response_code)
- tty.warn(msg)
- return (None, None)
+ if response_code != 200 and response_code != 201:
+ msg = 'Adding build failed (response code = {0}'.format(response_code)
+ tty.warn(msg)
+ return (None, None)
- response_text = response.read()
- response_json = json.loads(response_text)
- build_id = response_json['buildid']
+ response_text = response.read()
+ response_json = json.loads(response_text)
+ build_id = response_json['buildid']
+ except Exception as e:
+ print("Registering build in CDash failed: {0}".format(e))
return (build_id, build_stamp)
@@ -1412,15 +1416,26 @@ def read_cdashid_from_mirror(spec, mirror_url):
return int(contents)
-def push_mirror_contents(env, spec, specfile_path, mirror_url, sign_binaries):
+def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
+ """Unchecked version of the public API, for easier mocking"""
+ unsigned = not sign_binaries
+ tty.debug('Creating buildcache ({0})'.format(
+ 'unsigned' if unsigned else 'signed'))
+ hashes = env.all_hashes() if env else None
+ matches = spack.store.specfile_matches(specfile_path, hashes=hashes)
+ push_url = spack.mirror.push_url_from_mirror_url(mirror_url)
+ spec_kwargs = {'include_root': True, 'include_dependencies': False}
+ kwargs = {
+ 'force': True,
+ 'allow_root': True,
+ 'unsigned': unsigned
+ }
+ bindist.push(matches, push_url, spec_kwargs, **kwargs)
+
+
+def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
try:
- unsigned = not sign_binaries
- tty.debug('Creating buildcache ({0})'.format(
- 'unsigned' if unsigned else 'signed'))
- spack.cmd.buildcache._createtarball(
- env, spec_file=specfile_path, add_deps=False,
- output_location=mirror_url, force=True, allow_root=True,
- unsigned=unsigned)
+ _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url)
except Exception as inst:
# If the mirror we're pushing to is on S3 and there's some
# permissions problem, for example, we can't just target
diff --git a/lib/spack/spack/cmd/analyze.py b/lib/spack/spack/cmd/analyze.py
index f584674ae2..6048c47972 100644
--- a/lib/spack/spack/cmd/analyze.py
+++ b/lib/spack/spack/cmd/analyze.py
@@ -110,7 +110,6 @@ def analyze(parser, args, **kwargs):
monitor = spack.monitor.get_client(
host=args.monitor_host,
prefix=args.monitor_prefix,
- disable_auth=args.monitor_disable_auth,
)
# Run the analysis
diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py
index ae3e1b7639..7446650403 100644
--- a/lib/spack/spack/cmd/bootstrap.py
+++ b/lib/spack/spack/cmd/bootstrap.py
@@ -10,6 +10,8 @@ import shutil
import llnl.util.tty
import llnl.util.tty.color
+import spack
+import spack.bootstrap
import spack.cmd.common.arguments
import spack.config
import spack.main
@@ -32,6 +34,16 @@ def _add_scope_option(parser):
def setup_parser(subparser):
sp = subparser.add_subparsers(dest='subcommand')
+ status = sp.add_parser('status', help='get the status of Spack')
+ status.add_argument(
+ '--optional', action='store_true', default=False,
+ help='show the status of rarely used optional dependencies'
+ )
+ status.add_argument(
+ '--dev', action='store_true', default=False,
+ help='show the status of dependencies needed to develop Spack'
+ )
+
enable = sp.add_parser('enable', help='enable bootstrapping')
_add_scope_option(enable)
@@ -207,8 +219,39 @@ def _untrust(args):
llnl.util.tty.msg(msg.format(args.name))
+def _status(args):
+ sections = ['core', 'buildcache']
+ if args.optional:
+ sections.append('optional')
+ if args.dev:
+ sections.append('develop')
+
+ header = "@*b{{Spack v{0} - {1}}}".format(
+ spack.spack_version, spack.bootstrap.spec_for_current_python()
+ )
+ print(llnl.util.tty.color.colorize(header))
+ print()
+ # Use the context manager here to avoid swapping between user and
+ # bootstrap config many times
+ missing = False
+ with spack.bootstrap.ensure_bootstrap_configuration():
+ for current_section in sections:
+ status_msg, fail = spack.bootstrap.status_message(section=current_section)
+ missing = missing or fail
+ if status_msg:
+ print(llnl.util.tty.color.colorize(status_msg))
+ print()
+ legend = ('Spack will take care of bootstrapping any missing dependency marked'
+ ' as [@*y{B}]. Dependencies marked as [@*y{-}] are instead required'
+ ' to be found on the system.')
+ if missing:
+ print(llnl.util.tty.color.colorize(legend))
+ print()
+
+
def bootstrap(parser, args):
callbacks = {
+ 'status': _status,
'enable': _enable_or_disable,
'disable': _enable_or_disable,
'reset': _reset,
diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py
index 1fda884e9a..d6f5d63ea6 100644
--- a/lib/spack/spack/cmd/buildcache.py
+++ b/lib/spack/spack/cmd/buildcache.py
@@ -2,11 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import argparse
import os
import shutil
import sys
import tempfile
+import warnings
import llnl.util.tty as tty
@@ -40,7 +40,7 @@ def setup_parser(subparser):
setup_parser.parser = subparser
subparsers = subparser.add_subparsers(help='buildcache sub-commands')
- create = subparsers.add_parser('create', help=createtarball.__doc__)
+ create = subparsers.add_parser('create', help=create_fn.__doc__)
create.add_argument('-r', '--rel', action='store_true',
help="make all rpaths relative" +
" before creating tarballs.")
@@ -86,9 +86,9 @@ def setup_parser(subparser):
' decide to build a cache for only the package'
' or only the dependencies'))
arguments.add_common_arguments(create, ['specs'])
- create.set_defaults(func=createtarball)
+ create.set_defaults(func=create_fn)
- install = subparsers.add_parser('install', help=installtarball.__doc__)
+ install = subparsers.add_parser('install', help=install_fn.__doc__)
install.add_argument('-f', '--force', action='store_true',
help="overwrite install directory if it exists.")
install.add_argument('-m', '--multiple', action='store_true',
@@ -102,16 +102,11 @@ def setup_parser(subparser):
install.add_argument('-o', '--otherarch', action='store_true',
help="install specs from other architectures" +
" instead of default platform and OS")
- # This argument is needed by the bootstrapping logic to verify checksums
- install.add_argument('--sha256', help=argparse.SUPPRESS)
- install.add_argument(
- '--only-root', action='store_true', help=argparse.SUPPRESS
- )
arguments.add_common_arguments(install, ['specs'])
- install.set_defaults(func=installtarball)
+ install.set_defaults(func=install_fn)
- listcache = subparsers.add_parser('list', help=listspecs.__doc__)
+ listcache = subparsers.add_parser('list', help=list_fn.__doc__)
arguments.add_common_arguments(listcache, ['long', 'very_long'])
listcache.add_argument('-v', '--variants',
action='store_true',
@@ -121,29 +116,25 @@ def setup_parser(subparser):
help="list specs for all available architectures" +
" instead of default platform and OS")
arguments.add_common_arguments(listcache, ['specs'])
- listcache.set_defaults(func=listspecs)
+ listcache.set_defaults(func=list_fn)
- dlkeys = subparsers.add_parser('keys', help=getkeys.__doc__)
- dlkeys.add_argument(
+ keys = subparsers.add_parser('keys', help=keys_fn.__doc__)
+ keys.add_argument(
'-i', '--install', action='store_true',
help="install Keys pulled from mirror")
- dlkeys.add_argument(
+ keys.add_argument(
'-t', '--trust', action='store_true',
help="trust all downloaded keys")
- dlkeys.add_argument('-f', '--force', action='store_true',
- help="force new download of keys")
- dlkeys.set_defaults(func=getkeys)
-
- preview_parser = subparsers.add_parser(
- 'preview',
- help='analyzes an installed spec and reports whether '
- 'executables and libraries are relocatable'
- )
- arguments.add_common_arguments(preview_parser, ['installed_specs'])
- preview_parser.set_defaults(func=preview)
+ keys.add_argument('-f', '--force', action='store_true',
+ help="force new download of keys")
+ keys.set_defaults(func=keys_fn)
+
+ preview = subparsers.add_parser('preview', help=preview_fn.__doc__)
+ arguments.add_common_arguments(preview, ['installed_specs'])
+ preview.set_defaults(func=preview_fn)
# Check if binaries need to be rebuilt on remote mirror
- check = subparsers.add_parser('check', help=check_binaries.__doc__)
+ check = subparsers.add_parser('check', help=check_fn.__doc__)
check.add_argument(
'-m', '--mirror-url', default=None,
help='Override any configured mirrors with this mirror url')
@@ -175,28 +166,28 @@ def setup_parser(subparser):
help="Default to rebuilding packages if errors are encountered " +
"during the process of checking whether rebuilding is needed")
- check.set_defaults(func=check_binaries)
+ check.set_defaults(func=check_fn)
# Download tarball and specfile
- dltarball = subparsers.add_parser('download', help=get_tarball.__doc__)
- dltarball.add_argument(
+ download = subparsers.add_parser('download', help=download_fn.__doc__)
+ download.add_argument(
'-s', '--spec', default=None,
help="Download built tarball for spec from mirror")
- dltarball.add_argument(
+ download.add_argument(
'--spec-file', default=None,
help=("Download built tarball for spec (from json or yaml file) " +
"from mirror"))
- dltarball.add_argument(
+ download.add_argument(
'-p', '--path', default=None,
help="Path to directory where tarball should be downloaded")
- dltarball.add_argument(
+ download.add_argument(
'-c', '--require-cdashid', action='store_true', default=False,
help="Require .cdashid file to be downloaded with buildcache entry")
- dltarball.set_defaults(func=get_tarball)
+ download.set_defaults(func=download_fn)
# Get buildcache name
getbuildcachename = subparsers.add_parser('get-buildcache-name',
- help=get_buildcache_name.__doc__)
+ help=get_buildcache_name_fn.__doc__)
getbuildcachename.add_argument(
'-s', '--spec', default=None,
help='Spec string for which buildcache name is desired')
@@ -204,11 +195,11 @@ def setup_parser(subparser):
'--spec-file', default=None,
help=('Path to spec json or yaml file for which buildcache name is ' +
'desired'))
- getbuildcachename.set_defaults(func=get_buildcache_name)
+ getbuildcachename.set_defaults(func=get_buildcache_name_fn)
# Given the root spec, save the yaml of the dependent spec to a file
savespecfile = subparsers.add_parser('save-specfile',
- help=save_specfiles.__doc__)
+ help=save_specfile_fn.__doc__)
savespecfile.add_argument(
'--root-spec', default=None,
help='Root spec of dependent spec')
@@ -221,10 +212,10 @@ def setup_parser(subparser):
savespecfile.add_argument(
'--specfile-dir', default=None,
help='Path to directory where spec yamls should be saved')
- savespecfile.set_defaults(func=save_specfiles)
+ savespecfile.set_defaults(func=save_specfile_fn)
# Copy buildcache from some directory to another mirror url
- copy = subparsers.add_parser('copy', help=buildcache_copy.__doc__)
+ copy = subparsers.add_parser('copy', help=copy_fn.__doc__)
copy.add_argument(
'--base-dir', default=None,
help='Path to mirror directory (root of existing buildcache)')
@@ -235,10 +226,10 @@ def setup_parser(subparser):
copy.add_argument(
'--destination-url', default=None,
help='Destination mirror url')
- copy.set_defaults(func=buildcache_copy)
+ copy.set_defaults(func=copy_fn)
# Sync buildcache entries from one mirror to another
- sync = subparsers.add_parser('sync', help=buildcache_sync.__doc__)
+ sync = subparsers.add_parser('sync', help=sync_fn.__doc__)
source = sync.add_mutually_exclusive_group(required=True)
source.add_argument('--src-directory',
metavar='DIRECTORY',
@@ -265,312 +256,110 @@ def setup_parser(subparser):
metavar='MIRROR_URL',
type=str,
help="URL of the destination mirror")
- sync.set_defaults(func=buildcache_sync)
+ sync.set_defaults(func=sync_fn)
# Update buildcache index without copying any additional packages
update_index = subparsers.add_parser(
- 'update-index', help=buildcache_update_index.__doc__)
+ 'update-index', help=update_index_fn.__doc__)
update_index.add_argument(
'-d', '--mirror-url', default=None, help='Destination mirror url')
update_index.add_argument(
'-k', '--keys', default=False, action='store_true',
help='If provided, key index will be updated as well as package index')
- update_index.set_defaults(func=buildcache_update_index)
-
-
-def find_matching_specs(pkgs, allow_multiple_matches=False, env=None):
- """Returns a list of specs matching the not necessarily
- concretized specs given from cli
+ update_index.set_defaults(func=update_index_fn)
- Args:
- pkgs (str): spec to be matched against installed packages
- allow_multiple_matches (bool): if True multiple matches are admitted
- env (spack.environment.Environment or None): active environment, or ``None``
- if there is not one
- Return:
- list: list of specs
+def _matching_specs(args):
+ """Return a list of matching specs read from either a spec file (JSON or YAML),
+ a query over the store or a query over the active environment.
"""
+ env = ev.active_environment()
hashes = env.all_hashes() if env else None
+ if args.spec_file:
+ return spack.store.specfile_matches(args.spec_file, hashes=hashes)
- # List of specs that match expressions given via command line
- specs_from_cli = []
- has_errors = False
- tty.debug('find_matching_specs: about to parse specs for {0}'.format(pkgs))
- specs = spack.cmd.parse_specs(pkgs)
- for spec in specs:
- matching = spack.store.db.query(spec, hashes=hashes)
- # For each spec provided, make sure it refers to only one package.
- # Fail and ask user to be unambiguous if it doesn't
- if not allow_multiple_matches and len(matching) > 1:
- tty.error('%s matches multiple installed packages:' % spec)
- for match in matching:
- tty.msg('"%s"' % match.format())
- has_errors = True
-
- # No installed package matches the query
- if len(matching) == 0 and spec is not any:
- tty.error('{0} does not match any installed packages.'.format(
- spec))
- has_errors = True
-
- specs_from_cli.extend(matching)
- if has_errors:
- tty.die('use one of the matching specs above')
-
- return specs_from_cli
-
-
-def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
- other_arch=False):
- """Returns a list of specs matching the not necessarily
- concretized specs given from cli
-
- Args:
- specs: list of specs to be matched against buildcaches on mirror
- allow_multiple_matches : if True multiple matches are admitted
-
- Return:
- list of specs
- """
- # List of specs that match expressions given via command line
- specs_from_cli = []
- has_errors = False
-
- try:
- specs = bindist.update_cache_and_get_specs()
- except bindist.FetchCacheError as e:
- tty.error(e)
-
- if not other_arch:
- arch = spack.spec.Spec.default_arch()
- specs = [s for s in specs if s.satisfies(arch)]
+ if args.specs:
+ constraints = spack.cmd.parse_specs(args.specs)
+ return spack.store.find(constraints, hashes=hashes)
- for pkg in pkgs:
- matches = []
- tty.msg("buildcache spec(s) matching %s \n" % pkg)
- for spec in sorted(specs):
- if pkg.startswith('/'):
- pkghash = pkg.replace('/', '')
- if spec.dag_hash().startswith(pkghash):
- matches.append(spec)
- else:
- if spec.satisfies(pkg):
- matches.append(spec)
- # For each pkg provided, make sure it refers to only one package.
- # Fail and ask user to be unambiguous if it doesn't
- if not allow_multiple_matches and len(matches) > 1:
- tty.error('%s matches multiple downloaded packages:' % pkg)
- for match in matches:
- tty.msg('"%s"' % match.format())
- has_errors = True
-
- # No downloaded package matches the query
- if len(matches) == 0:
- tty.error('%s does not match any downloaded packages.' % pkg)
- has_errors = True
-
- specs_from_cli.extend(matches)
- if has_errors:
- tty.die('use one of the matching specs above')
-
- return specs_from_cli
-
-
-def _createtarball(env, spec_file=None, packages=None, add_spec=True,
- add_deps=True, output_location=os.getcwd(),
- signing_key=None, force=False, make_relative=False,
- unsigned=False, allow_root=False, rebuild_index=False):
- if spec_file:
- with open(spec_file, 'r') as fd:
- specfile_contents = fd.read()
- tty.debug('createtarball read specfile contents:')
- tty.debug(specfile_contents)
- if spec_file.endswith('.json'):
- s = Spec.from_json(specfile_contents)
- else:
- s = Spec.from_yaml(specfile_contents)
- package = '/{0}'.format(s.dag_hash())
- matches = find_matching_specs(package, env=env)
-
- elif packages:
- matches = find_matching_specs(packages, env=env)
-
- elif env:
- matches = [env.specs_by_hash[h] for h in env.concretized_order]
+ if env:
+ return [env.specs_by_hash[h] for h in env.concretized_order]
- else:
- tty.die("build cache file creation requires at least one" +
- " installed package spec, an active environment," +
- " or else a path to a json or yaml file containing a spec" +
- " to install")
- specs = set()
+ tty.die("build cache file creation requires at least one" +
+ " installed package spec, an active environment," +
+ " or else a path to a json or yaml file containing a spec" +
+ " to install")
- mirror = spack.mirror.MirrorCollection().lookup(output_location)
- outdir = url_util.format(mirror.push_url)
-
- msg = 'Buildcache files will be output to %s/build_cache' % outdir
- tty.msg(msg)
- if matches:
- tty.debug('Found at least one matching spec')
+def _concrete_spec_from_args(args):
+ spec_str, specfile_path = args.spec, args.spec_file
- for match in matches:
- tty.debug('examining match {0}'.format(match.format()))
- if match.external or match.virtual:
- tty.debug('skipping external or virtual spec %s' %
- match.format())
- else:
- lookup = spack.store.db.query_one(match)
-
- if not add_spec:
- tty.debug('skipping matching root spec %s' % match.format())
- elif lookup is None:
- tty.debug('skipping uninstalled matching spec %s' %
- match.format())
- else:
- tty.debug('adding matching spec %s' % match.format())
- specs.add(match)
-
- if not add_deps:
- continue
-
- tty.debug('recursing dependencies')
- for d, node in match.traverse(order='post',
- depth=True,
- deptype=('link', 'run')):
- # skip root, since it's handled above
- if d == 0:
- continue
-
- lookup = spack.store.db.query_one(node)
-
- if node.external or node.virtual:
- tty.debug('skipping external or virtual dependency %s' %
- node.format())
- elif lookup is None:
- tty.debug('skipping uninstalled depenendency %s' %
- node.format())
- else:
- tty.debug('adding dependency %s' % node.format())
- specs.add(node)
-
- tty.debug('writing tarballs to %s/build_cache' % outdir)
+ if not spec_str and not specfile_path:
+ tty.error('must provide either spec string or path to YAML or JSON specfile')
+ sys.exit(1)
- for spec in specs:
- tty.debug('creating binary cache file for package %s ' % spec.format())
+ if spec_str:
try:
- bindist.build_tarball(spec, outdir, force, make_relative,
- unsigned, allow_root, signing_key,
- rebuild_index)
- except bindist.NoOverwriteException as e:
- tty.warn(e)
+ constraints = spack.cmd.parse_specs(spec_str)
+ spec = spack.store.find(constraints)[0]
+ spec.concretize()
+ except SpecError as spec_error:
+ tty.error('Unable to concretize spec {0}'.format(spec_str))
+ tty.debug(spec_error)
+ sys.exit(1)
+ return spec
-def createtarball(args):
- """create a binary package from an existing install"""
+ return Spec.from_specfile(specfile_path)
- # restrict matching to current environment if one is active
- env = ev.active_environment()
- output_location = None
+def create_fn(args):
+ """create a binary package and push it to a mirror"""
if args.directory:
- output_location = args.directory
-
- # User meant to provide a path to a local directory.
- # Ensure that they did not accidentally pass a URL.
- scheme = url_util.parse(output_location, scheme='<missing>').scheme
- if scheme != '<missing>':
- raise ValueError(
- '"--directory" expected a local path; got a URL, instead')
-
- # User meant to provide a path to a local directory.
- # Ensure that the mirror lookup does not mistake it for a named mirror.
- output_location = 'file://' + output_location
-
- elif args.mirror_name:
- output_location = args.mirror_name
-
- # User meant to provide the name of a preconfigured mirror.
- # Ensure that the mirror lookup actually returns a named mirror.
- result = spack.mirror.MirrorCollection().lookup(output_location)
- if result.name == "<unnamed>":
- raise ValueError(
- 'no configured mirror named "{name}"'.format(
- name=output_location))
+ push_url = spack.mirror.push_url_from_directory(args.directory)
- elif args.mirror_url:
- output_location = args.mirror_url
+ if args.mirror_name:
+ push_url = spack.mirror.push_url_from_mirror_name(args.mirror_name)
- # User meant to provide a URL for an anonymous mirror.
- # Ensure that they actually provided a URL.
- scheme = url_util.parse(output_location, scheme='<missing>').scheme
- if scheme == '<missing>':
- raise ValueError(
- '"{url}" is not a valid URL'.format(url=output_location))
- add_spec = ('package' in args.things_to_install)
- add_deps = ('dependencies' in args.things_to_install)
-
- _createtarball(env, spec_file=args.spec_file, packages=args.specs,
- add_spec=add_spec, add_deps=add_deps,
- output_location=output_location, signing_key=args.key,
- force=args.force, make_relative=args.rel,
- unsigned=args.unsigned, allow_root=args.allow_root,
- rebuild_index=args.rebuild_index)
+ if args.mirror_url:
+ push_url = spack.mirror.push_url_from_mirror_url(args.mirror_url)
+ matches = _matching_specs(args)
-def installtarball(args):
+ msg = 'Pushing binary packages to {0}/build_cache'.format(push_url)
+ tty.msg(msg)
+ specs_kwargs = {
+ 'include_root': 'package' in args.things_to_install,
+ 'include_dependencies': 'dependencies' in args.things_to_install
+ }
+ kwargs = {
+ 'key': args.key,
+ 'force': args.force,
+ 'relative': args.rel,
+ 'unsigned': args.unsigned,
+ 'allow_root': args.allow_root,
+ 'regenerate_index': args.rebuild_index
+ }
+ bindist.push(matches, push_url, specs_kwargs, **kwargs)
+
+
+def install_fn(args):
"""install from a binary package"""
if not args.specs:
- tty.die("build cache file installation requires" +
- " at least one package spec argument")
- pkgs = set(args.specs)
- matches = match_downloaded_specs(pkgs, args.multiple, args.force,
- args.otherarch)
+ tty.die("a spec argument is required to install from a buildcache")
+ query = bindist.BinaryCacheQuery(all_architectures=args.otherarch)
+ matches = spack.store.find(args.specs, multiple=args.multiple, query_fn=query)
for match in matches:
- install_tarball(match, args)
+ bindist.install_single_spec(
+ match,
+ allow_root=args.allow_root,
+ unsigned=args.unsigned,
+ force=args.force
+ )
-def install_tarball(spec, args):
- s = Spec(spec)
- if s.external or s.virtual:
- tty.warn("Skipping external or virtual package %s" % spec.format())
- return
-
- # This argument is used only for bootstrapping specs without signatures,
- # since we need to check the sha256 of each tarball
- if not args.only_root:
- for d in s.dependencies(deptype=('link', 'run')):
- tty.msg("Installing buildcache for dependency spec %s" % d)
- install_tarball(d, args)
-
- package = spack.repo.get(spec)
- if s.concrete and package.installed and not args.force:
- tty.warn("Package for spec %s already installed." % spec.format())
- else:
- tarball = bindist.download_tarball(spec)
- if tarball:
- if args.sha256:
- checker = spack.util.crypto.Checker(args.sha256)
- msg = ('cannot verify checksum for "{0}"'
- ' [expected={1}]')
- msg = msg.format(tarball, args.sha256)
- if not checker.check(tarball):
- raise spack.binary_distribution.NoChecksumException(msg)
- tty.debug('Verified SHA256 checksum of the build cache')
-
- tty.msg('Installing buildcache for spec %s' % spec.format())
- bindist.extract_tarball(spec, tarball, args.allow_root,
- args.unsigned, args.force)
- spack.hooks.post_install(spec)
- spack.store.db.add(spec, spack.store.layout)
- else:
- tty.die('Download of binary cache file for spec %s failed.' %
- spec.format())
-
-
-def listspecs(args):
+def list_fn(args):
"""list binary packages available from mirrors"""
try:
specs = bindist.update_cache_and_get_specs()
@@ -593,19 +382,17 @@ def listspecs(args):
display_specs(specs, args, all_headers=True)
-def getkeys(args):
+def keys_fn(args):
"""get public keys available on mirrors"""
bindist.get_keys(args.install, args.trust, args.force)
-def preview(args):
- """Print a status tree of the selected specs that shows which nodes are
- relocatable and which might not be.
-
- Args:
- args: command line arguments
+def preview_fn(args):
+ """analyze an installed spec and reports whether executables
+ and libraries are relocatable
"""
- specs = find_matching_specs(args.specs, allow_multiple_matches=True)
+ constraints = spack.cmd.parse_specs(args.specs)
+ specs = spack.store.find(constraints, multiple=True)
# Cycle over the specs that match
for spec in specs:
@@ -614,7 +401,7 @@ def preview(args):
print(spec.tree(status_fn=spack.relocate.is_relocatable))
-def check_binaries(args):
+def check_fn(args):
"""Check specs (either a single spec from --spec, or else the full set
of release specs) against remote binary mirror(s) to see if any need
to be rebuilt. This command uses the process exit code to indicate
@@ -622,7 +409,7 @@ def check_binaries(args):
one of the indicated specs needs to be rebuilt.
"""
if args.spec or args.spec_file:
- specs = [get_concrete_spec(args)]
+ specs = [_concrete_spec_from_args(args)]
else:
env = spack.cmd.require_active_env(cmd_name='buildcache')
env.concretize()
@@ -649,34 +436,7 @@ def check_binaries(args):
configured_mirrors, specs, args.output_file, args.rebuild_on_error))
-def download_buildcache_files(concrete_spec, local_dest, require_cdashid,
- mirror_url=None):
- tarfile_name = bindist.tarball_name(concrete_spec, '.spack')
- tarball_dir_name = bindist.tarball_directory_name(concrete_spec)
- tarball_path_name = os.path.join(tarball_dir_name, tarfile_name)
- local_tarball_path = os.path.join(local_dest, tarball_dir_name)
-
- files_to_fetch = [
- {
- 'url': [tarball_path_name],
- 'path': local_tarball_path,
- 'required': True,
- }, {
- 'url': [bindist.tarball_name(concrete_spec, '.spec.json'),
- bindist.tarball_name(concrete_spec, '.spec.yaml')],
- 'path': local_dest,
- 'required': True,
- }, {
- 'url': [bindist.tarball_name(concrete_spec, '.cdashid')],
- 'path': local_dest,
- 'required': require_cdashid,
- },
- ]
-
- return bindist.download_buildcache_entry(files_to_fetch, mirror_url)
-
-
-def get_tarball(args):
+def download_fn(args):
"""Download buildcache entry from a remote mirror to local folder. This
command uses the process exit code to indicate its result, specifically,
a non-zero exit code indicates that the command failed to download at
@@ -691,54 +451,30 @@ def get_tarball(args):
tty.msg('No download path provided, exiting')
sys.exit(0)
- spec = get_concrete_spec(args)
- result = download_buildcache_files(spec, args.path, args.require_cdashid)
+ spec = _concrete_spec_from_args(args)
+ result = bindist.download_single_spec(
+ spec, args.path, require_cdashid=args.require_cdashid
+ )
if not result:
sys.exit(1)
-def get_concrete_spec(args):
- spec_str = args.spec
- spec_yaml_path = args.spec_file
-
- if not spec_str and not spec_yaml_path:
- tty.msg('Must provide either spec string or path to ' +
- 'yaml to concretize spec')
- sys.exit(1)
-
- if spec_str:
- try:
- spec = find_matching_specs(spec_str)[0]
- spec.concretize()
- except SpecError as spec_error:
- tty.error('Unable to concrectize spec {0}'.format(args.spec))
- tty.debug(spec_error)
- sys.exit(1)
-
- return spec
-
- with open(spec_yaml_path, 'r') as fd:
- return Spec.from_yaml(fd.read())
-
-
-def get_buildcache_name(args):
+def get_buildcache_name_fn(args):
"""Get name (prefix) of buildcache entries for this spec"""
- spec = get_concrete_spec(args)
+ spec = _concrete_spec_from_args(args)
buildcache_name = bindist.tarball_name(spec, '')
-
print('{0}'.format(buildcache_name))
- sys.exit(0)
-
-def save_specfiles(args):
+def save_specfile_fn(args):
"""Get full spec for dependencies, relative to root spec, and write them
to files in the specified output directory. Uses exit code to signal
success or failure. An exit code of zero means the command was likely
successful. If any errors or exceptions are encountered, or if expected
command-line arguments are not provided, then the exit code will be
- non-zero."""
+ non-zero.
+ """
if not args.root_spec and not args.root_specfile:
tty.msg('No root spec provided, exiting.')
sys.exit(1)
@@ -765,12 +501,15 @@ def save_specfiles(args):
sys.exit(0)
-def buildcache_copy(args):
+def copy_fn(args):
"""Copy a buildcache entry and all its files from one mirror, given as
'--base-dir', to some other mirror, specified as '--destination-url'.
The specific buildcache entry to be copied from one location to the
other is identified using the '--spec-file' argument."""
- # TODO: This sub-command should go away once #11117 is merged
+ # TODO: Remove after v0.18.0 release
+ msg = ('"spack buildcache copy" is deprecated and will be removed from '
+ 'Spack starting in v0.19.0')
+ warnings.warn(msg)
if not args.spec_file:
tty.msg('No spec yaml provided, exiting.')
@@ -845,7 +584,7 @@ def buildcache_copy(args):
shutil.copyfile(cdashid_src_path, cdashid_dest_path)
-def buildcache_sync(args):
+def sync_fn(args):
""" Syncs binaries (and associated metadata) from one mirror to another.
Requires an active environment in order to know which specs to sync.
@@ -979,7 +718,7 @@ def update_index(mirror_url, update_keys=False):
bindist.generate_key_index(keys_url)
-def buildcache_update_index(args):
+def update_index_fn(args):
"""Update a buildcache index."""
outdir = '.'
if args.mirror_url:
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index b2a2bc5891..4796ed2500 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -25,17 +25,17 @@ level = "long"
def setup_parser(subparser):
subparser.add_argument(
- '--keep-stage', action='store_true',
+ '--keep-stage', action='store_true', default=False,
help="don't clean up staging area when command completes")
sp = subparser.add_mutually_exclusive_group()
sp.add_argument(
- '-b', '--batch', action='store_true',
+ '-b', '--batch', action='store_true', default=False,
help="don't ask which versions to checksum")
sp.add_argument(
- '-l', '--latest', action='store_true',
+ '-l', '--latest', action='store_true', default=False,
help="checksum the latest available version only")
sp.add_argument(
- '-p', '--preferred', action='store_true',
+ '-p', '--preferred', action='store_true', default=False,
help="checksum the preferred version only")
arguments.add_common_arguments(subparser, ['package'])
subparser.add_argument(
diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py
index b8ed1fb27d..c0df5a2f40 100644
--- a/lib/spack/spack/cmd/ci.py
+++ b/lib/spack/spack/cmd/ci.py
@@ -397,8 +397,12 @@ def ci_rebuild(args):
tty.debug('Getting {0} buildcache from {1}'.format(
job_spec_pkg_name, matching_mirror))
tty.debug('Downloading to {0}'.format(build_cache_dir))
- buildcache.download_buildcache_files(
- job_spec, build_cache_dir, False, matching_mirror)
+ bindist.download_single_spec(
+ job_spec,
+ build_cache_dir,
+ require_cdashid=False,
+ mirror_url=matching_mirror
+ )
# Now we are done and successful
sys.exit(0)
@@ -433,17 +437,17 @@ def ci_rebuild(args):
cdash_build_name, cdash_base_url, cdash_project,
cdash_site, job_spec_buildgroup)
- if cdash_build_id is not None:
- cdash_upload_url = '{0}/submit.php?project={1}'.format(
- cdash_base_url, cdash_project_enc)
+ cdash_upload_url = '{0}/submit.php?project={1}'.format(
+ cdash_base_url, cdash_project_enc)
- install_args.extend([
- '--cdash-upload-url', cdash_upload_url,
- '--cdash-build', cdash_build_name,
- '--cdash-site', cdash_site,
- '--cdash-buildstamp', cdash_build_stamp,
- ])
+ install_args.extend([
+ '--cdash-upload-url', cdash_upload_url,
+ '--cdash-build', cdash_build_name,
+ '--cdash-site', cdash_site,
+ '--cdash-buildstamp', cdash_build_stamp,
+ ])
+ if cdash_build_id is not None:
tty.debug('CDash: Relating build with dependency builds')
spack_ci.relate_cdash_builds(
spec_map, cdash_base_url, cdash_build_id, cdash_project,
@@ -553,8 +557,8 @@ def ci_rebuild(args):
# per-PR mirror, if this is a PR pipeline
if buildcache_mirror_url:
spack_ci.push_mirror_contents(
- env, job_spec, job_spec_yaml_path, buildcache_mirror_url,
- sign_binaries)
+ env, job_spec_yaml_path, buildcache_mirror_url, sign_binaries
+ )
if cdash_build_id:
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
@@ -568,8 +572,8 @@ def ci_rebuild(args):
# prefix is set)
if pipeline_mirror_url:
spack_ci.push_mirror_contents(
- env, job_spec, job_spec_yaml_path, pipeline_mirror_url,
- sign_binaries)
+ env, job_spec_yaml_path, pipeline_mirror_url, sign_binaries
+ )
if cdash_build_id:
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py
index bea8ccea90..0aa30a15ca 100644
--- a/lib/spack/spack/cmd/common/arguments.py
+++ b/lib/spack/spack/cmd/common/arguments.py
@@ -328,3 +328,22 @@ def reuse():
'--reuse', action='store_true', default=False,
help='reuse installed dependencies'
)
+
+
+def add_s3_connection_args(subparser, add_help):
+ subparser.add_argument(
+ '--s3-access-key-id',
+ help="ID string to use to connect to this S3 mirror")
+ subparser.add_argument(
+ '--s3-access-key-secret',
+ help="Secret string to use to connect to this S3 mirror")
+ subparser.add_argument(
+ '--s3-access-token',
+ help="Access Token to use to connect to this S3 mirror")
+ subparser.add_argument(
+ '--s3-profile',
+ help="S3 profile name to use to connect to this S3 mirror",
+ default=None)
+ subparser.add_argument(
+ '--s3-endpoint-url',
+ help="Access Token to use to connect to this S3 mirror")
diff --git a/lib/spack/spack/cmd/containerize.py b/lib/spack/spack/cmd/containerize.py
index e22a5b4c4e..d3537d544c 100644
--- a/lib/spack/spack/cmd/containerize.py
+++ b/lib/spack/spack/cmd/containerize.py
@@ -50,7 +50,6 @@ def containerize(parser, args):
# If we have a monitor request, add monitor metadata to config
if args.use_monitor:
config['spack']['monitor'] = {
- "disable_auth": args.monitor_disable_auth,
"host": args.monitor_host,
"keep_going": args.monitor_keep_going,
"prefix": args.monitor_prefix,
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index bccf3bf66d..baeccc513e 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -316,6 +316,12 @@ class PythonPackageTemplate(PackageTemplate):
url = '/'.join([project, match.group(4)])
self.url_line = ' pypi = "{url}"'
+ else:
+ # Add a reminder about spack preferring PyPI URLs
+ self.url_line = '''
+ # FIXME: ensure the package is not available through PyPI. If it is,
+ # re-run `spack create --force` with the PyPI URL.
+''' + self.url_line
super(PythonPackageTemplate, self).__init__(name, url, *args, **kwargs)
diff --git a/lib/spack/spack/cmd/dev_build.py b/lib/spack/spack/cmd/dev_build.py
index 24c416e105..d2fc8df638 100644
--- a/lib/spack/spack/cmd/dev_build.py
+++ b/lib/spack/spack/cmd/dev_build.py
@@ -19,7 +19,7 @@ level = "long"
def setup_parser(subparser):
- arguments.add_common_arguments(subparser, ['jobs'])
+ arguments.add_common_arguments(subparser, ['jobs', 'reuse'])
subparser.add_argument(
'-d', '--source-path', dest='source_path', default=None,
help="path to source directory. defaults to the current directory")
@@ -86,7 +86,7 @@ def dev_build(self, args):
# Forces the build to run out of the source directory.
spec.constrain('dev_path=%s' % source_path)
- spec.concretize()
+ spec.concretize(reuse=args.reuse)
package = spack.repo.get(spec)
if package.installed:
diff --git a/lib/spack/spack/cmd/flake8.py b/lib/spack/spack/cmd/flake8.py
deleted file mode 100644
index 0579cb674a..0000000000
--- a/lib/spack/spack/cmd/flake8.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-from __future__ import print_function
-
-import llnl.util.tty as tty
-
-import spack.cmd.style
-
-description = "alias for spack style (deprecated)"
-section = spack.cmd.style.section
-level = spack.cmd.style.level
-
-
-def setup_parser(subparser):
- spack.cmd.style.setup_parser(subparser)
-
-
-def flake8(parser, args):
- tty.warn(
- "spack flake8 is deprecated", "please use `spack style` to run style checks"
- )
- return spack.cmd.style.style(parser, args)
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index f4a4644312..2c69069ef0 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -305,7 +305,6 @@ environment variables:
monitor = spack.monitor.get_client(
host=args.monitor_host,
prefix=args.monitor_prefix,
- disable_auth=args.monitor_disable_auth,
tags=args.monitor_tags,
save_local=args.monitor_save_local,
)
@@ -404,6 +403,10 @@ environment variables:
except SpackError as e:
tty.debug(e)
reporter.concretization_report(e.message)
+
+ # Tell spack monitor about it
+ if args.use_monitor and abstract_specs:
+ monitor.failed_concretization(abstract_specs)
raise
# 2. Concrete specs from yaml files
@@ -467,7 +470,6 @@ environment variables:
# Update install_args with the monitor args, needed for build task
kwargs.update({
- "monitor_disable_auth": args.monitor_disable_auth,
"monitor_keep_going": args.monitor_keep_going,
"monitor_host": args.monitor_host,
"use_monitor": args.use_monitor,
diff --git a/lib/spack/spack/cmd/license.py b/lib/spack/spack/cmd/license.py
index 3627b35391..82cbc3b2a7 100644
--- a/lib/spack/spack/cmd/license.py
+++ b/lib/spack/spack/cmd/license.py
@@ -46,9 +46,8 @@ licensed_files = [
r'^lib/spack/docs/.*\.py$',
r'^lib/spack/docs/spack.yaml$',
- # 2 files in external
+ # 1 file in external
r'^lib/spack/external/__init__.py$',
- r'^lib/spack/external/ordereddict_backport.py$',
# shell scripts in share
r'^share/spack/.*\.sh$',
diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py
index fa202f09f0..a9e51f019d 100644
--- a/lib/spack/spack/cmd/mirror.py
+++ b/lib/spack/spack/cmd/mirror.py
@@ -6,7 +6,6 @@
import sys
import llnl.util.tty as tty
-from llnl.util.tty.colify import colify
import spack.cmd
import spack.cmd.common.arguments as arguments
@@ -93,7 +92,7 @@ def setup_parser(subparser):
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
help="configuration scope to modify")
-
+ arguments.add_s3_connection_args(add_parser, False)
# Remove
remove_parser = sp.add_parser('remove', aliases=['rm'],
help=mirror_remove.__doc__)
@@ -117,6 +116,7 @@ def setup_parser(subparser):
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
help="configuration scope to modify")
+ arguments.add_s3_connection_args(set_url_parser, False)
# List
list_parser = sp.add_parser('list', help=mirror_list.__doc__)
@@ -129,7 +129,7 @@ def setup_parser(subparser):
def mirror_add(args):
"""Add a mirror to Spack."""
url = url_util.format(args.url)
- spack.mirror.add(args.name, url, args.scope)
+ spack.mirror.add(args.name, url, args.scope, args)
def mirror_remove(args):
@@ -140,7 +140,6 @@ def mirror_remove(args):
def mirror_set_url(args):
"""Change the URL of a mirror."""
url = url_util.format(args.url)
-
mirrors = spack.config.get('mirrors', scope=args.scope)
if not mirrors:
mirrors = syaml_dict()
@@ -149,7 +148,15 @@ def mirror_set_url(args):
tty.die("No mirror found with name %s." % args.name)
entry = mirrors[args.name]
-
+ key_values = ["s3_access_key_id", "s3_access_token", "s3_profile"]
+
+ if any(value for value in key_values if value in args):
+ incoming_data = {"url": url,
+ "access_pair": (args.s3_access_key_id,
+ args.s3_access_key_secret),
+ "access_token": args.s3_access_token,
+ "profile": args.s3_profile,
+ "endpoint_url": args.s3_endpoint_url}
try:
fetch_url = entry['fetch']
push_url = entry['push']
@@ -159,20 +166,28 @@ def mirror_set_url(args):
changes_made = False
if args.push:
- changes_made = changes_made or push_url != url
- push_url = url
+ if isinstance(push_url, dict):
+ changes_made = changes_made or push_url != incoming_data
+ push_url = incoming_data
+ else:
+ changes_made = changes_made or push_url != url
+ push_url = url
else:
- changes_made = (
- changes_made or fetch_url != push_url or push_url != url)
-
- fetch_url, push_url = url, url
+ if isinstance(push_url, dict):
+ changes_made = (changes_made or push_url != incoming_data
+ or push_url != incoming_data)
+ fetch_url, push_url = incoming_data, incoming_data
+ else:
+ changes_made = changes_made or push_url != url
+ fetch_url, push_url = url, url
items = [
(
(n, u)
if n != args.name else (
(n, {"fetch": fetch_url, "push": push_url})
- if fetch_url != push_url else (n, fetch_url)
+ if fetch_url != push_url else (n, {"fetch": fetch_url,
+ "push": fetch_url})
)
)
for n, u in mirrors.items()
@@ -183,10 +198,10 @@ def mirror_set_url(args):
if changes_made:
tty.msg(
- "Changed%s url for mirror %s." %
+ "Changed%s url or connection information for mirror %s." %
((" (push)" if args.push else ""), args.name))
else:
- tty.msg("Url already set for mirror %s." % args.name)
+ tty.msg("No changes made to mirror %s." % args.name)
def mirror_list(args):
@@ -330,7 +345,7 @@ def mirror_create(args):
" %-4d failed to fetch." % e)
if error:
tty.error("Failed downloads:")
- colify(s.cformat("{name}{@version}") for s in error)
+ tty.colify(s.cformat("{name}{@version}") for s in error)
sys.exit(1)
diff --git a/lib/spack/spack/cmd/monitor.py b/lib/spack/spack/cmd/monitor.py
index 90371f446f..c395825ff9 100644
--- a/lib/spack/spack/cmd/monitor.py
+++ b/lib/spack/spack/cmd/monitor.py
@@ -27,7 +27,6 @@ def monitor(parser, args, **kwargs):
monitor = spack.monitor.get_client(
host=args.monitor_host,
prefix=args.monitor_prefix,
- disable_auth=args.monitor_disable_auth,
)
# Upload the directory
diff --git a/lib/spack/spack/cmd/style.py b/lib/spack/spack/cmd/style.py
index eb95904dfb..648f30bed0 100644
--- a/lib/spack/spack/cmd/style.py
+++ b/lib/spack/spack/cmd/style.py
@@ -48,10 +48,10 @@ exclude_directories = [
#: double-check the results of other tools (if, e.g., --fix was provided)
#: The list maps an executable name to a spack spec needed to install it.
tool_order = [
- ("isort", "py-isort@4.3.5:"),
- ("mypy", "py-mypy@0.900:"),
- ("black", "py-black"),
- ("flake8", "py-flake8"),
+ ("isort", spack.bootstrap.ensure_isort_in_path_or_raise),
+ ("mypy", spack.bootstrap.ensure_mypy_in_path_or_raise),
+ ("black", spack.bootstrap.ensure_black_in_path_or_raise),
+ ("flake8", spack.bootstrap.ensure_flake8_in_path_or_raise),
]
#: tools we run in spack style
@@ -387,40 +387,33 @@ def style(parser, args):
file_list = [prefix_relative(p) for p in file_list]
- returncode = 0
+ return_code = 0
with working_dir(args.root):
if not file_list:
file_list = changed_files(args.base, args.untracked, args.all)
print_style_header(file_list, args)
- # run tools in order defined in tool_order
- returncode = 0
- for tool_name, tool_spec in tool_order:
- if getattr(args, tool_name):
+ commands = {}
+ with spack.bootstrap.ensure_bootstrap_configuration():
+ for tool_name, bootstrap_fn in tool_order:
+ # Skip the tool if it was not requested
+ if not getattr(args, tool_name):
+ continue
+
+ commands[tool_name] = bootstrap_fn()
+
+ for tool_name, bootstrap_fn in tool_order:
+ # Skip the tool if it was not requested
+ if not getattr(args, tool_name):
+ continue
+
run_function, required = tools[tool_name]
print_tool_header(tool_name)
+ return_code |= run_function(commands[tool_name], file_list, args)
- try:
- # Bootstrap tools so we don't need to require install
- with spack.bootstrap.ensure_bootstrap_configuration():
- spec = spack.spec.Spec(tool_spec)
- cmd = None
- cmd = spack.bootstrap.get_executable(
- tool_name, spec=spec, install=True
- )
- if not cmd:
- color.cprint(" @y{%s not in PATH, skipped}" % tool_name)
- continue
- returncode |= run_function(cmd, file_list, args)
-
- except Exception as e:
- raise spack.error.SpackError(
- "Couldn't bootstrap %s:" % tool_name, str(e)
- )
-
- if returncode == 0:
+ if return_code == 0:
tty.msg(color.colorize("@*{spack style checks were clean}"))
else:
tty.error(color.colorize("@*{spack style found errors}"))
- return returncode
+ return return_code
diff --git a/lib/spack/spack/cmd/tutorial.py b/lib/spack/spack/cmd/tutorial.py
index 7db451fdbb..e5c9439c8c 100644
--- a/lib/spack/spack/cmd/tutorial.py
+++ b/lib/spack/spack/cmd/tutorial.py
@@ -77,7 +77,9 @@ def tutorial(parser, args):
# Note that checkout MUST be last. It changes Spack under our feet.
# If you don't put this last, you'll get import errors for the code
# that follows (exacerbated by the various lazy singletons we use)
- tty.msg("Ensuring we're on the releases/v0.16 branch")
+ tty.msg("Ensuring we're on the releases/v{0}.{1} branch".format(
+ *spack.spack_version_info[:2]
+ ))
git = which("git", required=True)
with working_dir(spack.paths.prefix):
git("checkout", tutorial_branch)
diff --git a/lib/spack/spack/cmd/unit_test.py b/lib/spack/spack/cmd/unit_test.py
index ce4e2cbdbb..0f117f2d7a 100644
--- a/lib/spack/spack/cmd/unit_test.py
+++ b/lib/spack/spack/cmd/unit_test.py
@@ -7,14 +7,19 @@ from __future__ import division, print_function
import argparse
import collections
+import os.path
import re
import sys
-import pytest
+try:
+ import pytest
+except ImportError:
+ pytest = None # type: ignore
+
from six import StringIO
+import llnl.util.filesystem
import llnl.util.tty.color as color
-from llnl.util.filesystem import working_dir
from llnl.util.tty.colify import colify
import spack.paths
@@ -67,7 +72,25 @@ def setup_parser(subparser):
def do_list(args, extra_args):
"""Print a lists of tests than what pytest offers."""
- # Run test collection and get the tree out.
+ def colorize(c, prefix):
+ if isinstance(prefix, tuple):
+ return "::".join(
+ color.colorize("@%s{%s}" % (c, p))
+ for p in prefix if p != "()"
+ )
+ return color.colorize("@%s{%s}" % (c, prefix))
+
+ # To list the files we just need to inspect the filesystem,
+ # which doesn't need to wait for pytest collection and doesn't
+ # require parsing pytest output
+ files = llnl.util.filesystem.find(
+ root=spack.paths.test_path, files='*.py', recursive=True
+ )
+ files = [
+ os.path.relpath(f, start=spack.paths.spack_root)
+ for f in files if not f.endswith(('conftest.py', '__init__.py'))
+ ]
+
old_output = sys.stdout
try:
sys.stdout = output = StringIO()
@@ -76,12 +99,13 @@ def do_list(args, extra_args):
sys.stdout = old_output
lines = output.getvalue().split('\n')
- tests = collections.defaultdict(lambda: set())
- prefix = []
+ tests = collections.defaultdict(set)
# collect tests into sections
+ node_regexp = re.compile(r"(\s*)<([^ ]*) ['\"]?([^']*)['\"]?>")
+ key_parts, name_parts = [], []
for line in lines:
- match = re.match(r"(\s*)<([^ ]*) '([^']*)'", line)
+ match = node_regexp.match(line)
if not match:
continue
indent, nodetype, name = match.groups()
@@ -90,25 +114,31 @@ def do_list(args, extra_args):
if "[" in name:
name = name[:name.index("[")]
- depth = len(indent) // 2
-
- if nodetype.endswith("Function"):
- key = tuple(prefix)
- tests[key].add(name)
- else:
- prefix = prefix[:depth]
- prefix.append(name)
-
- def colorize(c, prefix):
- if isinstance(prefix, tuple):
- return "::".join(
- color.colorize("@%s{%s}" % (c, p))
- for p in prefix if p != "()"
- )
- return color.colorize("@%s{%s}" % (c, prefix))
+ len_indent = len(indent)
+ if os.path.isabs(name):
+ name = os.path.relpath(name, start=spack.paths.spack_root)
+
+ item = (len_indent, name, nodetype)
+
+ # Reduce the parts to the scopes that are of interest
+ name_parts = [x for x in name_parts if x[0] < len_indent]
+ key_parts = [x for x in key_parts if x[0] < len_indent]
+
+ # From version 3.X to version 6.X the output format
+ # changed a lot in pytest, and probably will change
+ # in the future - so this manipulation might be fragile
+ if nodetype.lower() == 'function':
+ name_parts.append(item)
+ key_end = os.path.join(*[x[1] for x in key_parts])
+ key = next(f for f in files if f.endswith(key_end))
+ tests[key].add(tuple(x[1] for x in name_parts))
+ elif nodetype.lower() == 'class':
+ name_parts.append(item)
+ elif nodetype.lower() in ('package', 'module'):
+ key_parts.append(item)
if args.list == "list":
- files = set(prefix[0] for prefix in tests)
+ files = set(tests.keys())
color_files = [colorize("B", file) for file in sorted(files)]
colify(color_files)
@@ -144,6 +174,14 @@ def add_back_pytest_args(args, unknown_args):
def unit_test(parser, args, unknown_args):
+ global pytest
+ if pytest is None:
+ vendored_pytest_dir = os.path.join(
+ spack.paths.external_path, 'pytest-fallback'
+ )
+ sys.path.append(vendored_pytest_dir)
+ import pytest
+
if args.pytest_help:
# make the pytest.main help output more accurate
sys.argv[0] = 'spack unit-test'
@@ -161,7 +199,7 @@ def unit_test(parser, args, unknown_args):
pytest_root = spack.extensions.path_for_extension(target, *extensions)
# pytest.ini lives in the root of the spack repository.
- with working_dir(pytest_root):
+ with llnl.util.filesystem.working_dir(pytest_root):
if args.list:
do_list(args, pytest_args)
return
diff --git a/lib/spack/spack/compilers/dpcpp.py b/lib/spack/spack/compilers/dpcpp.py
new file mode 100644
index 0000000000..664b1d86c5
--- /dev/null
+++ b/lib/spack/spack/compilers/dpcpp.py
@@ -0,0 +1,29 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import spack.compilers.oneapi
+
+
+class Dpcpp(spack.compilers.oneapi.Oneapi):
+ """This is the same as the oneAPI compiler but uses dpcpp instead of
+ icpx (for DPC++ source files). It explicitly refers to dpcpp, so that
+ CMake test files which check the compiler name (e.g. CMAKE_CXX_COMPILER)
+ detect it as dpcpp.
+
+ Ideally we could switch out icpx for dpcpp where needed in the oneAPI
+ compiler definition, but two things are needed for that: (a) a way to
+ tell the compiler that it should be using dpcpp and (b) a way to
+ customize the link_paths
+
+ See also: https://www.intel.com/content/www/us/en/develop/documentation/oneapi-dpcpp-cpp-compiler-dev-guide-and-reference/top/compiler-setup/using-the-command-line/invoking-the-compiler.html
+ """
+ # Subclasses use possible names of C++ compiler
+ cxx_names = ['dpcpp']
+
+ # Named wrapper links within build_env_path
+ link_paths = {'cc': 'oneapi/icx',
+ 'cxx': 'oneapi/dpcpp',
+ 'f77': 'oneapi/ifx',
+ 'fc': 'oneapi/ifx'}
diff --git a/lib/spack/spack/compilers/oneapi.py b/lib/spack/spack/compilers/oneapi.py
index a28259c02c..bf5c7767e9 100644
--- a/lib/spack/spack/compilers/oneapi.py
+++ b/lib/spack/spack/compilers/oneapi.py
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from os.path import dirname
+
from spack.compiler import Compiler
@@ -105,3 +107,11 @@ class Oneapi(Compiler):
@property
def stdcxx_libs(self):
return ('-cxxlib', )
+
+ def setup_custom_environment(self, pkg, env):
+    # workaround a bug in the icpx driver where it requires sycl-post-link to be on the PATH
+ # It is located in the same directory as the driver. Error message:
+ # clang++: error: unable to execute command:
+ # Executable "sycl-post-link" doesn't exist!
+ if self.cxx:
+ env.prepend_path('PATH', dirname(self.cxx))
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index f3572cfdcf..90b4c5b669 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -16,14 +16,13 @@ TODO: make this customizable and allow users to configure
"""
from __future__ import print_function
+import functools
import os.path
import platform
import tempfile
from contextlib import contextmanager
from itertools import chain
-from functools_backport import reverse_order
-
import archspec.cpu
import llnl.util.filesystem as fs
@@ -48,6 +47,23 @@ from spack.version import Version, VersionList, VersionRange, ver
_abi = llnl.util.lang.Singleton(lambda: spack.abi.ABI())
+@functools.total_ordering
+class reverse_order(object):
+ """Helper for creating key functions.
+
+ This is a wrapper that inverts the sense of the natural
+ comparisons on the object.
+ """
+ def __init__(self, value):
+ self.value = value
+
+ def __eq__(self, other):
+ return other.value == self.value
+
+ def __lt__(self, other):
+ return other.value < self.value
+
+
class Concretizer(object):
"""You can subclass this class to override some of the default
concretization strategies, or you can override all of them.
@@ -130,11 +146,11 @@ class Concretizer(object):
# Use a sort key to order the results
return sorted(usable, key=lambda spec: (
- not spec.external, # prefer externals
- pref_key(spec), # respect prefs
- spec.name, # group by name
- reverse_order(spec.versions), # latest version
- spec # natural order
+ not spec.external, # prefer externals
+ pref_key(spec), # respect prefs
+ spec.name, # group by name
+ reverse_order(spec.versions), # latest version
+ spec # natural order
))
def choose_virtual_or_external(self, spec):
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index f3fc73e4b4..d967fef122 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -39,7 +39,6 @@ from contextlib import contextmanager
from typing import List # novm
import ruamel.yaml as yaml
-from ordereddict_backport import OrderedDict
from ruamel.yaml.error import MarkedYAMLError
from six import iteritems
@@ -401,7 +400,7 @@ class Configuration(object):
Configuration, ordered from lowest to highest precedence
"""
- self.scopes = OrderedDict()
+ self.scopes = collections.OrderedDict()
for scope in scopes:
self.push_scope(scope)
self.format_updates = collections.defaultdict(list)
diff --git a/lib/spack/spack/container/writers/__init__.py b/lib/spack/spack/container/writers/__init__.py
index 9808969bfc..abe6dbf211 100644
--- a/lib/spack/spack/container/writers/__init__.py
+++ b/lib/spack/spack/container/writers/__init__.py
@@ -183,19 +183,18 @@ class PathContext(tengine.Context):
def monitor(self):
"""Enable using spack monitor during build."""
Monitor = collections.namedtuple('Monitor', [
- 'enabled', 'host', 'disable_auth', 'prefix', 'keep_going', 'tags'
+ 'enabled', 'host', 'prefix', 'keep_going', 'tags'
])
monitor = self.config.get("monitor")
# If we don't have a monitor group, cut out early.
if not monitor:
- return Monitor(False, None, None, None, None, None)
+ return Monitor(False, None, None, None, None)
return Monitor(
enabled=True,
host=monitor.get('host'),
prefix=monitor.get('prefix'),
- disable_auth=monitor.get("disable_auth"),
keep_going=monitor.get("keep_going"),
tags=monitor.get('tags')
)
diff --git a/lib/spack/spack/detection/path.py b/lib/spack/spack/detection/path.py
index 0e652ed6c2..66998e4b0b 100644
--- a/lib/spack/spack/detection/path.py
+++ b/lib/spack/spack/detection/path.py
@@ -9,6 +9,7 @@ import collections
import os
import os.path
import re
+import warnings
import llnl.util.filesystem
import llnl.util.tty
@@ -99,9 +100,14 @@ def by_executable(packages_to_check, path_hints=None):
# for one prefix, but without additional details (e.g. about the
# naming scheme which differentiates them), the spec won't be
# usable.
- specs = _convert_to_iterable(
- pkg.determine_spec_details(prefix, exes_in_prefix)
- )
+ try:
+ specs = _convert_to_iterable(
+ pkg.determine_spec_details(prefix, exes_in_prefix)
+ )
+ except Exception as e:
+ specs = []
+ msg = 'error detecting "{0}" from prefix {1} [{2}]'
+ warnings.warn(msg.format(pkg.name, prefix, str(e)))
if not specs:
llnl.util.tty.debug(
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index f7400f4da9..c23146125a 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -466,7 +466,8 @@ def extends(spec, type=('build', 'run'), **kwargs):
return
_depends_on(pkg, spec, when=when, type=type)
- pkg.extendees[spec] = (spack.spec.Spec(spec), kwargs)
+ spec_obj = spack.spec.Spec(spec)
+ pkg.extendees[spec_obj.name] = (spec_obj, kwargs)
return _execute_extends
diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py
index 3114f55638..67303c8a4d 100644
--- a/lib/spack/spack/environment/environment.py
+++ b/lib/spack/spack/environment/environment.py
@@ -13,7 +13,6 @@ import time
import ruamel.yaml as yaml
import six
-from ordereddict_backport import OrderedDict
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -287,6 +286,51 @@ def _eval_conditional(string):
return eval(string, valid_variables)
+def _is_dev_spec_and_has_changed(spec):
+ """Check if the passed spec is a dev build and whether it has changed since the
+ last installation"""
+ # First check if this is a dev build and in the process already try to get
+ # the dev_path
+ dev_path_var = spec.variants.get('dev_path', None)
+ if not dev_path_var:
+ return False
+
+ # Now we can check whether the code changed since the last installation
+ if not spec.package.installed:
+ # Not installed -> nothing to compare against
+ return False
+
+ _, record = spack.store.db.query_by_spec_hash(spec.dag_hash())
+ mtime = fs.last_modification_time_recursive(dev_path_var.value)
+ return mtime > record.installation_time
+
+
+def _spec_needs_overwrite(spec, changed_dev_specs):
+ """Check whether the current spec needs to be overwritten because either it has
+    changed itself or one of its dependencies has changed"""
+ # if it's not installed, we don't need to overwrite it
+ if not spec.package.installed:
+ return False
+
+ # If the spec itself has changed this is a trivial decision
+ if spec in changed_dev_specs:
+ return True
+
+ # if spec and all deps aren't dev builds, we don't need to overwrite it
+ if not any(spec.satisfies(c)
+ for c in ('dev_path=*', '^dev_path=*')):
+ return False
+
+ # If any dep needs overwrite, or any dep is missing and is a dev build then
+ # overwrite this package
+ if any(
+ ((not dep.package.installed) and dep.satisfies('dev_path=*')) or
+ _spec_needs_overwrite(dep, changed_dev_specs)
+ for dep in spec.traverse(root=False)
+ ):
+ return True
+
+
class ViewDescriptor(object):
def __init__(self, base_path, root, projections={}, select=[], exclude=[],
link=default_view_link, link_type='symlink'):
@@ -318,7 +362,11 @@ class ViewDescriptor(object):
# projections guaranteed to be ordered dict if true-ish
# for python2.6, may be syaml or ruamel.yaml implementation
# so we have to check for both
- types = (OrderedDict, syaml.syaml_dict, yaml.comments.CommentedMap)
+ types = (
+ collections.OrderedDict,
+ syaml.syaml_dict,
+ yaml.comments.CommentedMap
+ )
assert isinstance(self.projections, types)
ret['projections'] = self.projections
if self.select:
@@ -638,7 +686,7 @@ class Environment(object):
else:
self.raw_yaml, self.yaml = _read_yaml(f)
- self.spec_lists = OrderedDict()
+ self.spec_lists = collections.OrderedDict()
for item in config_dict(self.yaml).get('definitions', []):
entry = copy.deepcopy(item)
@@ -1389,52 +1437,19 @@ class Environment(object):
self.concretized_order.append(h)
self.specs_by_hash[h] = concrete
- def _spec_needs_overwrite(self, spec):
- # Overwrite the install if it's a dev build (non-transitive)
- # and the code has been changed since the last install
- # or one of the dependencies has been reinstalled since
- # the last install
-
- # if it's not installed, we don't need to overwrite it
- if not spec.package.installed:
- return False
-
- # if spec and all deps aren't dev builds, we don't need to overwrite it
- if not any(spec.satisfies(c)
- for c in ('dev_path=*', '^dev_path=*')):
- return False
-
- # if any dep needs overwrite, or any dep is missing and is a dev build
- # then overwrite this package
- if any(
- self._spec_needs_overwrite(dep) or
- ((not dep.package.installed) and dep.satisfies('dev_path=*'))
- for dep in spec.traverse(root=False)
- ):
- return True
-
- # if it's not a direct dev build and its dependencies haven't
- # changed, it hasn't changed.
- # We don't merely check satisfaction (spec.satisfies('dev_path=*')
- # because we need the value of the variant in the next block of code
- dev_path_var = spec.variants.get('dev_path', None)
- if not dev_path_var:
- return False
-
- # if it is a direct dev build, check whether the code changed
- # we already know it is installed
- _, record = spack.store.db.query_by_spec_hash(spec.dag_hash())
- mtime = fs.last_modification_time_recursive(dev_path_var.value)
- return mtime > record.installation_time
-
def _get_overwrite_specs(self):
- ret = []
+ # Collect all specs in the environment first before checking which ones
+ # to rebuild to avoid checking the same specs multiple times
+ specs_to_check = set()
for dag_hash in self.concretized_order:
- spec = self.specs_by_hash[dag_hash]
- ret.extend([d.dag_hash() for d in spec.traverse(root=True)
- if self._spec_needs_overwrite(d)])
+ root_spec = self.specs_by_hash[dag_hash]
+ specs_to_check.update(root_spec.traverse(root=True))
- return ret
+ changed_dev_specs = set(s for s in specs_to_check if
+ _is_dev_spec_and_has_changed(s))
+
+ return [s.dag_hash() for s in specs_to_check if
+ _spec_needs_overwrite(s, changed_dev_specs)]
def _install_log_links(self, spec):
if not spec.external:
@@ -1503,8 +1518,12 @@ class Environment(object):
else:
tty.debug('Processing {0} uninstalled specs'.format(len(specs_to_install)))
+ specs_to_overwrite = self._get_overwrite_specs()
+ tty.debug('{0} specs need to be overwritten'.format(
+ len(specs_to_overwrite)))
+
install_args['overwrite'] = install_args.get(
- 'overwrite', []) + self._get_overwrite_specs()
+ 'overwrite', []) + specs_to_overwrite
installs = []
for spec in specs_to_install:
diff --git a/lib/spack/spack/extensions.py b/lib/spack/spack/extensions.py
index 69a287fe85..0a1a056819 100644
--- a/lib/spack/spack/extensions.py
+++ b/lib/spack/spack/extensions.py
@@ -5,6 +5,7 @@
"""Service functions and classes to implement the hooks
for Spack's command extensions.
"""
+import importlib
import os
import re
import sys
@@ -98,9 +99,7 @@ def load_command_extension(command, path):
ensure_package_creation(extension)
ensure_package_creation(extension + '.cmd')
- # TODO: Upon removal of support for Python 2.6 substitute the call
- # TODO: below with importlib.import_module(module_name)
- module = llnl.util.lang.load_module_from_file(module_name, cmd_path)
+ module = importlib.import_module(module_name)
sys.modules[module_name] = module
return module
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 888cad7bf3..432010adca 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -1437,7 +1437,13 @@ class GCSFetchStrategy(URLFetchStrategy):
basename = os.path.basename(parsed_url.path)
with working_dir(self.stage.path):
- _, headers, stream = web_util.read_from_url(self.url)
+ import spack.util.s3 as s3_util
+ s3 = s3_util.create_s3_session(self.url,
+ connection=s3_util.get_mirror_connection(parsed_url), url_type="fetch") # noqa: E501
+
+ headers = s3.get_object(Bucket=parsed_url.netloc,
+ Key=parsed_url.path.lstrip("/"))
+ stream = headers["Body"]
with open(basename, 'wb') as f:
shutil.copyfileobj(stream, f)
diff --git a/lib/spack/spack/filesystem_view.py b/lib/spack/spack/filesystem_view.py
index 659ae550ae..741e38490a 100644
--- a/lib/spack/spack/filesystem_view.py
+++ b/lib/spack/spack/filesystem_view.py
@@ -3,14 +3,13 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import collections
import functools as ft
import os
import re
import shutil
import sys
-from ordereddict_backport import OrderedDict
-
from llnl.util import tty
from llnl.util.filesystem import mkdirp, remove_dead_links, remove_empty_directories
from llnl.util.lang import index_by, match_predicate
@@ -79,7 +78,7 @@ def view_copy(src, dst, view, spec=None):
orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(spack.paths.spack_root)
new_sbang = sbang.sbang_shebang_line()
- prefix_to_projection = OrderedDict({
+ prefix_to_projection = collections.OrderedDict({
spec.prefix: view.get_projection_for_spec(spec)})
for dep in spec.traverse():
diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py
index e802fa0197..ca6a5fade9 100644
--- a/lib/spack/spack/hooks/__init__.py
+++ b/lib/spack/spack/hooks/__init__.py
@@ -90,6 +90,7 @@ on_phase_error = _HookRunner('on_phase_error')
on_install_start = _HookRunner('on_install_start')
on_install_success = _HookRunner('on_install_success')
on_install_failure = _HookRunner('on_install_failure')
+on_install_cancel = _HookRunner('on_install_cancel')
# Analyzer hooks
on_analyzer_save = _HookRunner('on_analyzer_save')
diff --git a/lib/spack/spack/hooks/monitor.py b/lib/spack/spack/hooks/monitor.py
index 5ddc1223e8..9da5012593 100644
--- a/lib/spack/spack/hooks/monitor.py
+++ b/lib/spack/spack/hooks/monitor.py
@@ -41,6 +41,17 @@ def on_install_failure(spec):
tty.verbose(result.get('message'))
+def on_install_cancel(spec):
+ """Triggered on cancel of an install
+ """
+ if not spack.monitor.cli:
+ return
+
+ tty.debug("Running on_install_cancel for %s" % spec)
+ result = spack.monitor.cli.cancel_task(spec)
+ tty.verbose(result.get('message'))
+
+
def on_phase_success(pkg, phase_name, log_file):
"""Triggered on a phase success
"""
diff --git a/lib/spack/spack/install_test.py b/lib/spack/spack/install_test.py
index 07644f5cc5..a8fe0f4024 100644
--- a/lib/spack/spack/install_test.py
+++ b/lib/spack/spack/install_test.py
@@ -287,7 +287,10 @@ class TestSuite(object):
try:
with open(filename, 'r') as f:
data = sjson.load(f)
- return TestSuite.from_dict(data)
+ test_suite = TestSuite.from_dict(data)
+ content_hash = os.path.basename(os.path.dirname(filename))
+ test_suite._hash = content_hash
+ return test_suite
except Exception as e:
tty.debug(e)
raise sjson.SpackJSONError("error parsing JSON TestSuite:", str(e))
diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py
index df1d704cd7..d70f535a89 100644
--- a/lib/spack/spack/installer.py
+++ b/lib/spack/spack/installer.py
@@ -1200,6 +1200,7 @@ class PackageInstaller(object):
except spack.build_environment.StopPhase as e:
# A StopPhase exception means that do_install was asked to
# stop early from clients, and is not an error at this point
+ spack.hooks.on_install_failure(task.request.pkg.spec)
pid = '{0}: '.format(self.pid) if tty.show_pid() else ''
tty.debug('{0}{1}'.format(pid, str(e)))
tty.debug('Package stage directory: {0}' .format(pkg.stage.source_path))
@@ -1657,7 +1658,7 @@ class PackageInstaller(object):
err = 'Failed to install {0} due to {1}: {2}'
tty.error(err.format(pkg.name, exc.__class__.__name__,
str(exc)))
- spack.hooks.on_install_failure(task.request.pkg.spec)
+ spack.hooks.on_install_cancel(task.request.pkg.spec)
raise
except (Exception, SystemExit) as exc:
@@ -1921,6 +1922,9 @@ class BuildProcessInstaller(object):
except BaseException:
combine_phase_logs(pkg.phase_log_files, pkg.log_path)
spack.hooks.on_phase_error(pkg, phase_name, log_file)
+
+ # phase error indicates install error
+ spack.hooks.on_install_failure(pkg.spec)
raise
# We assume loggers share echo True/False
diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py
index a4ec57ab5b..9b6fd5ba3c 100644
--- a/lib/spack/spack/mirror.py
+++ b/lib/spack/spack/mirror.py
@@ -11,6 +11,7 @@ the main server for a particular package is down. Or, if the computer
where spack is run is not connected to the internet, it allows spack
to download packages directly from a mirror (e.g., on an intranet).
"""
+import collections
import operator
import os
import os.path
@@ -19,7 +20,6 @@ import traceback
import ruamel.yaml.error as yaml_error
import six
-from ordereddict_backport import OrderedDict
if sys.version_info >= (3, 5):
from collections.abc import Mapping # novm
@@ -41,6 +41,10 @@ from spack.util.spack_yaml import syaml_dict
from spack.version import VersionList
+def _is_string(url):
+ return isinstance(url, six.string_types)
+
+
def _display_mirror_entry(size, name, url, type_=None):
if type_:
type_ = "".join((" (", type_, ")"))
@@ -59,7 +63,8 @@ class Mirror(object):
to them. These two URLs are usually the same.
"""
- def __init__(self, fetch_url, push_url=None, name=None):
+ def __init__(self, fetch_url, push_url=None,
+ name=None):
self._fetch_url = fetch_url
self._push_url = push_url
self._name = name
@@ -85,7 +90,9 @@ class Mirror(object):
def to_dict(self):
if self._push_url is None:
- return self._fetch_url
+ return syaml_dict([
+ ('fetch', self._fetch_url),
+ ('push', self._fetch_url)])
else:
return syaml_dict([
('fetch', self._fetch_url),
@@ -96,16 +103,16 @@ class Mirror(object):
if isinstance(d, six.string_types):
return Mirror(d, name=name)
else:
- return Mirror(d['fetch'], d['push'], name)
+ return Mirror(d['fetch'], d['push'], name=name)
def display(self, max_len=0):
if self._push_url is None:
- _display_mirror_entry(max_len, self._name, self._fetch_url)
+ _display_mirror_entry(max_len, self._name, self.fetch_url)
else:
_display_mirror_entry(
- max_len, self._name, self._fetch_url, "fetch")
+ max_len, self._name, self.fetch_url, "fetch")
_display_mirror_entry(
- max_len, self._name, self._push_url, "push")
+ max_len, self._name, self.push_url, "push")
def __str__(self):
name = self._name
@@ -137,24 +144,83 @@ class Mirror(object):
def name(self):
return self._name or "<unnamed>"
+ def get_profile(self, url_type):
+ if isinstance(self._fetch_url, dict):
+ if url_type == "push":
+ return self._push_url.get('profile', None)
+ return self._fetch_url.get('profile', None)
+ else:
+ return None
+
+ def set_profile(self, url_type, profile):
+ if url_type == "push":
+ self._push_url["profile"] = profile
+ else:
+ self._fetch_url["profile"] = profile
+
+ def get_access_pair(self, url_type):
+ if isinstance(self._fetch_url, dict):
+ if url_type == "push":
+ return self._push_url.get('access_pair', None)
+ return self._fetch_url.get('access_pair', None)
+ else:
+ return None
+
+ def set_access_pair(self, url_type, connection_tuple):
+ if url_type == "push":
+ self._push_url["access_pair"] = connection_tuple
+ else:
+ self._fetch_url["access_pair"] = connection_tuple
+
+ def get_endpoint_url(self, url_type):
+ if isinstance(self._fetch_url, dict):
+ if url_type == "push":
+ return self._push_url.get('endpoint_url', None)
+ return self._fetch_url.get('endpoint_url', None)
+ else:
+ return None
+
+ def set_endpoint_url(self, url_type, url):
+ if url_type == "push":
+ self._push_url["endpoint_url"] = url
+ else:
+ self._fetch_url["endpoint_url"] = url
+
+ def get_access_token(self, url_type):
+ if isinstance(self._fetch_url, dict):
+ if url_type == "push":
+ return self._push_url.get('access_token', None)
+ return self._fetch_url.get('access_token', None)
+ else:
+ return None
+
+ def set_access_token(self, url_type, connection_token):
+ if url_type == "push":
+ self._push_url["access_token"] = connection_token
+ else:
+ self._fetch_url["access_token"] = connection_token
+
@property
def fetch_url(self):
- return self._fetch_url
+ return self._fetch_url if _is_string(self._fetch_url) \
+ else self._fetch_url["url"]
@fetch_url.setter
def fetch_url(self, url):
- self._fetch_url = url
+ self._fetch_url["url"] = url
self._normalize()
@property
def push_url(self):
if self._push_url is None:
- return self._fetch_url
- return self._push_url
+ return self._fetch_url if _is_string(self._fetch_url) \
+ else self._fetch_url["url"]
+ return self._push_url if _is_string(self._push_url) \
+ else self._push_url["url"]
@push_url.setter
def push_url(self, url):
- self._push_url = url
+ self._push_url["url"] = url
self._normalize()
def _normalize(self):
@@ -166,7 +232,7 @@ class MirrorCollection(Mapping):
"""A mapping of mirror names to mirrors."""
def __init__(self, mirrors=None, scope=None):
- self._mirrors = OrderedDict(
+ self._mirrors = collections.OrderedDict(
(name, Mirror.from_dict(mirror, name))
for name, mirror in (
mirrors.items() if mirrors is not None else
@@ -453,7 +519,7 @@ def create(path, specs, skip_unstable_versions=False):
return mirror_stats.stats()
-def add(name, url, scope):
+def add(name, url, scope, args={}):
"""Add a named mirror in the given scope"""
mirrors = spack.config.get('mirrors', scope=scope)
if not mirrors:
@@ -463,7 +529,18 @@ def add(name, url, scope):
tty.die("Mirror with name %s already exists." % name)
items = [(n, u) for n, u in mirrors.items()]
- items.insert(0, (name, url))
+ mirror_data = url
+ key_values = ["s3_access_key_id", "s3_access_token", "s3_profile"]
+ # On creation, assume connection data is set for both
+ if any(value for value in key_values if value in args):
+ url_dict = {"url": url,
+ "access_pair": (args.s3_access_key_id, args.s3_access_key_secret),
+ "access_token": args.s3_access_token,
+ "profile": args.s3_profile,
+ "endpoint_url": args.s3_endpoint_url}
+ mirror_data = {"fetch": url_dict, "push": url_dict}
+
+ items.insert(0, (name, mirror_data))
mirrors = syaml_dict(items)
spack.config.set('mirrors', mirrors, scope=scope)
@@ -569,6 +646,35 @@ def _add_single_spec(spec, mirror, mirror_stats):
mirror_stats.error()
+def push_url_from_directory(output_directory):
+ """Given a directory in the local filesystem, return the URL on
+ which to push binary packages.
+ """
+ scheme = url_util.parse(output_directory, scheme='<missing>').scheme
+ if scheme != '<missing>':
+ raise ValueError('expected a local path, but got a URL instead')
+ mirror_url = 'file://' + output_directory
+ mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
+ return url_util.format(mirror.push_url)
+
+
+def push_url_from_mirror_name(mirror_name):
+ """Given a mirror name, return the URL on which to push binary packages."""
+ mirror = spack.mirror.MirrorCollection().lookup(mirror_name)
+ if mirror.name == "<unnamed>":
+ raise ValueError('no mirror named "{0}"'.format(mirror_name))
+ return url_util.format(mirror.push_url)
+
+
+def push_url_from_mirror_url(mirror_url):
+ """Given a mirror URL, return the URL on which to push binary packages."""
+ scheme = url_util.parse(mirror_url, scheme='<missing>').scheme
+ if scheme == '<missing>':
+ raise ValueError('"{0}" is not a valid URL'.format(mirror_url))
+ mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
+ return url_util.format(mirror.push_url)
+
+
class MirrorError(spack.error.SpackError):
"""Superclass of all mirror-creation related errors."""
diff --git a/lib/spack/spack/monitor.py b/lib/spack/spack/monitor.py
index c0df4ea680..dd967d793d 100644
--- a/lib/spack/spack/monitor.py
+++ b/lib/spack/spack/monitor.py
@@ -38,8 +38,7 @@ import spack.util.spack_yaml as syaml
cli = None
-def get_client(host, prefix="ms1", disable_auth=False, allow_fail=False, tags=None,
- save_local=False):
+def get_client(host, prefix="ms1", allow_fail=False, tags=None, save_local=False):
"""
Get a monitor client for a particular host and prefix.
@@ -57,8 +56,8 @@ def get_client(host, prefix="ms1", disable_auth=False, allow_fail=False, tags=No
cli = SpackMonitorClient(host=host, prefix=prefix, allow_fail=allow_fail,
tags=tags, save_local=save_local)
- # If we don't disable auth, environment credentials are required
- if not disable_auth and not save_local:
+ # Auth is always required unless we are saving locally
+ if not save_local:
cli.require_auth()
# We will exit early if the monitoring service is not running, but
@@ -93,9 +92,6 @@ def get_monitor_group(subparser):
'--monitor-save-local', action='store_true', dest='monitor_save_local',
default=False, help="save monitor results to .spack instead of server.")
monitor_group.add_argument(
- '--monitor-no-auth', action='store_true', dest='monitor_disable_auth',
- default=False, help="the monitoring server does not require auth.")
- monitor_group.add_argument(
'--monitor-tags', dest='monitor_tags', default=None,
help="One or more (comma separated) tags for a build.")
monitor_group.add_argument(
@@ -122,13 +118,16 @@ class SpackMonitorClient:
def __init__(self, host=None, prefix="ms1", allow_fail=False, tags=None,
save_local=False):
+ # We can control setting an arbitrary version if needed
+ sv = spack.main.get_version()
+ self.spack_version = os.environ.get("SPACKMON_SPACK_VERSION") or sv
+
self.host = host or "http://127.0.0.1"
self.baseurl = "%s/%s" % (self.host, prefix.strip("/"))
self.token = os.environ.get("SPACKMON_TOKEN")
self.username = os.environ.get("SPACKMON_USER")
self.headers = {}
self.allow_fail = allow_fail
- self.spack_version = spack.main.get_version()
self.capture_build_environment()
self.tags = tags
self.save_local = save_local
@@ -204,6 +203,14 @@ class SpackMonitorClient:
"""
from spack.util.environment import get_host_environment_metadata
self.build_environment = get_host_environment_metadata()
+ keys = list(self.build_environment.keys())
+
+ # Allow to customize any of these values via the environment
+ for key in keys:
+ envar_name = "SPACKMON_%s" % key.upper()
+ envar = os.environ.get(envar_name)
+ if envar:
+ self.build_environment[key] = envar
def require_auth(self):
"""
@@ -417,6 +424,37 @@ class SpackMonitorClient:
return configs
+ def failed_concretization(self, specs):
+ """
+ Given a list of abstract specs, tell spack monitor concretization failed.
+ """
+ configs = {}
+
+ # There should only be one spec generally (what cases would have >1?)
+ for spec in specs:
+
+ # update the spec to have build hash indicating that it cannot be built
+ meta = spec.to_dict()['spec']
+ nodes = []
+ for node in meta.get("nodes", []):
+ for hashtype in ["build_hash", "full_hash"]:
+ node[hashtype] = "FAILED_CONCRETIZATION"
+ nodes.append(node)
+ meta['nodes'] = nodes
+
+ # We can't concretize / hash
+ as_dict = {"spec": meta,
+ "spack_version": self.spack_version}
+
+ if self.save_local:
+ filename = "spec-%s-%s-config.json" % (spec.name, spec.version)
+ self.save(as_dict, filename)
+ else:
+ response = self.do_request("specs/new/", data=sjson.dump(as_dict))
+ configs[spec.package.name] = response.get('data', {})
+
+ return configs
+
def new_build(self, spec):
"""
Create a new build.
@@ -507,6 +545,11 @@ class SpackMonitorClient:
"""
return self.update_build(spec, status="FAILED")
+ def cancel_task(self, spec):
+ """Given a spec, mark it as cancelled.
+ """
+ return self.update_build(spec, status="CANCELLED")
+
def send_analyze_metadata(self, pkg, metadata):
"""
Send spack analyzer metadata to the spack monitor server.
diff --git a/lib/spack/spack/operating_systems/linux_distro.py b/lib/spack/spack/operating_systems/linux_distro.py
index 758e8bea47..8bb2aa7749 100644
--- a/lib/spack/spack/operating_systems/linux_distro.py
+++ b/lib/spack/spack/operating_systems/linux_distro.py
@@ -18,10 +18,8 @@ class LinuxDistro(OperatingSystem):
def __init__(self):
try:
# This will throw an error if imported on a non-Linux platform.
- from external.distro import linux_distribution
- distname, version, _ = linux_distribution(
- full_distribution_name=False)
- distname, version = str(distname), str(version)
+ import external.distro
+ distname, version = external.distro.id(), external.distro.version()
except ImportError:
distname, version = 'unknown', ''
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 8e1e81c124..77a881c442 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -29,7 +29,6 @@ import types
from typing import Any, Callable, Dict, List, Optional # novm
import six
-from ordereddict_backport import OrderedDict
import llnl.util.filesystem as fsys
import llnl.util.tty as tty
@@ -902,7 +901,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
explicitly defined ``url`` argument. So, this list may be empty
if a package only defines ``url`` at the top level.
"""
- version_urls = OrderedDict()
+ version_urls = collections.OrderedDict()
for v, args in sorted(self.versions.items()):
if 'url' in args:
version_urls[v] = args['url']
diff --git a/lib/spack/spack/pkgkit.py b/lib/spack/spack/pkgkit.py
index fc056f14ad..19c5fac500 100644
--- a/lib/spack/spack/pkgkit.py
+++ b/lib/spack/spack/pkgkit.py
@@ -29,7 +29,11 @@ from spack.build_systems.makefile import MakefilePackage
from spack.build_systems.maven import MavenPackage
from spack.build_systems.meson import MesonPackage
from spack.build_systems.octave import OctavePackage
-from spack.build_systems.oneapi import IntelOneApiLibraryPackage, IntelOneApiPackage
+from spack.build_systems.oneapi import (
+ IntelOneApiLibraryPackage,
+ IntelOneApiPackage,
+ IntelOneApiStaticLibraryList,
+)
from spack.build_systems.perl import PerlPackage
from spack.build_systems.python import PythonPackage
from spack.build_systems.qmake import QMakePackage
diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py
index 14bc0e1953..9cf01d7c9c 100644
--- a/lib/spack/spack/relocate.py
+++ b/lib/spack/spack/relocate.py
@@ -2,19 +2,19 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import collections
import multiprocessing.pool
import os
import re
import shutil
-from collections import defaultdict
import macholib.mach_o
import macholib.MachO
-from ordereddict_backport import OrderedDict
import llnl.util.lang
import llnl.util.tty as tty
+import spack.bootstrap
import spack.platforms
import spack.repo
import spack.spec
@@ -76,32 +76,16 @@ class BinaryTextReplaceError(spack.error.SpackError):
def _patchelf():
- """Return the full path to the patchelf binary, if available, else None.
-
- Search first the current PATH for patchelf. If not found, try to look
- if the default patchelf spec is installed and if not install it.
-
- Return None on Darwin or if patchelf cannot be found.
- """
- # Check if patchelf is already in the PATH
- patchelf = executable.which('patchelf')
- if patchelf is not None:
- return patchelf.path
-
- # Check if patchelf spec is installed
- spec = spack.spec.Spec('patchelf')
- spec._old_concretize(deprecation_warning=False)
- exe_path = os.path.join(spec.prefix.bin, "patchelf")
- if spec.package.installed and os.path.exists(exe_path):
- return exe_path
-
- # Skip darwin
+ """Return the full path to the patchelf binary, if available, else None."""
if is_macos:
return None
- # Install the spec and return its path
- spec.package.do_install()
- return exe_path if os.path.exists(exe_path) else None
+ patchelf = executable.which('patchelf')
+ if patchelf is None:
+ with spack.bootstrap.ensure_bootstrap_configuration():
+ patchelf = spack.bootstrap.ensure_patchelf_in_path_or_raise()
+
+ return patchelf.path
def _elf_rpaths_for(path):
@@ -807,7 +791,7 @@ def relocate_text(files, prefixes, concurrency=32):
# orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(orig_spack)
# new_sbang = '#!/bin/bash {0}/bin/sbang'.format(new_spack)
- compiled_prefixes = OrderedDict({})
+ compiled_prefixes = collections.OrderedDict({})
for orig_prefix, new_prefix in prefixes.items():
if orig_prefix != new_prefix:
@@ -845,7 +829,7 @@ def relocate_text_bin(binaries, prefixes, concurrency=32):
Raises:
BinaryTextReplaceError: when the new path is longer than the old path
"""
- byte_prefixes = OrderedDict({})
+ byte_prefixes = collections.OrderedDict({})
for orig_prefix, new_prefix in prefixes.items():
if orig_prefix != new_prefix:
@@ -1032,7 +1016,7 @@ def fixup_macos_rpath(root, filename):
# Convert rpaths list to (name -> number of occurrences)
add_rpaths = set()
del_rpaths = set()
- rpaths = defaultdict(int)
+ rpaths = collections.defaultdict(int)
for rpath in rpath_list:
rpaths[rpath] += 1
diff --git a/lib/spack/spack/reporters/cdash.py b/lib/spack/spack/reporters/cdash.py
index 19721fc676..73c75a1e18 100644
--- a/lib/spack/spack/reporters/cdash.py
+++ b/lib/spack/spack/reporters/cdash.py
@@ -2,9 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-
import codecs
+import collections
import hashlib
import os.path
import platform
@@ -13,7 +12,6 @@ import socket
import time
import xml.sax.saxutils
-from ordereddict_backport import OrderedDict
from six import iteritems, text_type
from six.moves.urllib.parse import urlencode
from six.moves.urllib.request import HTTPHandler, Request, build_opener
@@ -96,7 +94,7 @@ class CDash(Reporter):
buildstamp_format = "%Y%m%d-%H%M-{0}".format(args.cdash_track)
self.buildstamp = time.strftime(buildstamp_format,
time.localtime(self.endtime))
- self.buildIds = OrderedDict()
+ self.buildIds = collections.OrderedDict()
self.revision = ''
git = which('git')
with working_dir(spack.paths.spack_root):
diff --git a/lib/spack/spack/s3_handler.py b/lib/spack/spack/s3_handler.py
index 8f9322716a..3841287946 100644
--- a/lib/spack/spack/s3_handler.py
+++ b/lib/spack/spack/s3_handler.py
@@ -41,7 +41,8 @@ class WrapStream(BufferedReader):
def _s3_open(url):
parsed = url_util.parse(url)
- s3 = s3_util.create_s3_session(parsed)
+ s3 = s3_util.create_s3_session(parsed,
+ connection=s3_util.get_mirror_connection(parsed)) # noqa: E501
bucket = parsed.netloc
key = parsed.path
diff --git a/lib/spack/spack/schema/bootstrap.py b/lib/spack/spack/schema/bootstrap.py
index bd3c6630fb..e77001e854 100644
--- a/lib/spack/spack/schema/bootstrap.py
+++ b/lib/spack/spack/schema/bootstrap.py
@@ -39,7 +39,7 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack bootstrap configuration file schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/schema/buildcache_spec.py b/lib/spack/spack/schema/buildcache_spec.py
index 563235c311..a72c70a59c 100644
--- a/lib/spack/spack/schema/buildcache_spec.py
+++ b/lib/spack/spack/schema/buildcache_spec.py
@@ -11,7 +11,7 @@
import spack.schema.spec
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack buildcache specfile schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/schema/cdash.py b/lib/spack/spack/schema/cdash.py
index 41c0146b1d..b236159629 100644
--- a/lib/spack/spack/schema/cdash.py
+++ b/lib/spack/spack/schema/cdash.py
@@ -28,7 +28,7 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack cdash configuration file schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/schema/compilers.py b/lib/spack/spack/schema/compilers.py
index 91ace05f4e..df32036491 100644
--- a/lib/spack/spack/schema/compilers.py
+++ b/lib/spack/spack/schema/compilers.py
@@ -84,7 +84,7 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack compiler configuration file schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/schema/config.py b/lib/spack/spack/schema/config.py
index 0cf533eb18..801c6d5ea5 100644
--- a/lib/spack/spack/schema/config.py
+++ b/lib/spack/spack/schema/config.py
@@ -110,7 +110,7 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack core configuration file schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/schema/database_index.py b/lib/spack/spack/schema/database_index.py
index 105f6ad6a3..7a6143d555 100644
--- a/lib/spack/spack/schema/database_index.py
+++ b/lib/spack/spack/schema/database_index.py
@@ -14,7 +14,7 @@ import spack.schema.spec
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack spec schema',
'type': 'object',
'required': ['database'],
diff --git a/lib/spack/spack/schema/env.py b/lib/spack/spack/schema/env.py
index de6a5d9568..5a9bfe9aa1 100644
--- a/lib/spack/spack/schema/env.py
+++ b/lib/spack/spack/schema/env.py
@@ -52,7 +52,7 @@ spec_list_schema = {
projections_scheme = spack.schema.projections.properties['projections']
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack environment file schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/schema/gitlab_ci.py b/lib/spack/spack/schema/gitlab_ci.py
index d6d8f564a3..4e5abe397b 100644
--- a/lib/spack/spack/schema/gitlab_ci.py
+++ b/lib/spack/spack/schema/gitlab_ci.py
@@ -153,7 +153,7 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack gitlab-ci configuration file schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/schema/merged.py b/lib/spack/spack/schema/merged.py
index 11db4f78df..bbc288cb7a 100644
--- a/lib/spack/spack/schema/merged.py
+++ b/lib/spack/spack/schema/merged.py
@@ -40,7 +40,7 @@ properties = union_dicts(
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack merged configuration file schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/schema/mirrors.py b/lib/spack/spack/schema/mirrors.py
index 6dec5aac97..38cb126d6c 100644
--- a/lib/spack/spack/schema/mirrors.py
+++ b/lib/spack/spack/schema/mirrors.py
@@ -24,8 +24,8 @@ properties = {
'type': 'object',
'required': ['fetch', 'push'],
'properties': {
- 'fetch': {'type': 'string'},
- 'push': {'type': 'string'}
+ 'fetch': {'type': ['string', 'object']},
+ 'push': {'type': ['string', 'object']}
}
}
]
@@ -37,7 +37,7 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack mirror configuration file schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/schema/modules.py b/lib/spack/spack/schema/modules.py
index 90755f5588..4b30ae582f 100644
--- a/lib/spack/spack/schema/modules.py
+++ b/lib/spack/spack/schema/modules.py
@@ -219,7 +219,7 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack module file configuration file schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/schema/packages.py b/lib/spack/spack/schema/packages.py
index 86aabc6649..55ed1bbf3b 100644
--- a/lib/spack/spack/schema/packages.py
+++ b/lib/spack/spack/schema/packages.py
@@ -154,7 +154,7 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack package configuration file schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/schema/projections.py b/lib/spack/spack/schema/projections.py
index cab512fe3b..7623a5be6d 100644
--- a/lib/spack/spack/schema/projections.py
+++ b/lib/spack/spack/schema/projections.py
@@ -25,7 +25,7 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack view projection configuration file schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/schema/repos.py b/lib/spack/spack/schema/repos.py
index 44cf688a48..d16c1b07bd 100644
--- a/lib/spack/spack/schema/repos.py
+++ b/lib/spack/spack/schema/repos.py
@@ -22,7 +22,7 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack repository configuration file schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/schema/spec.py b/lib/spack/spack/schema/spec.py
index 3dd84553f7..5d2618bd80 100644
--- a/lib/spack/spack/schema/spec.py
+++ b/lib/spack/spack/schema/spec.py
@@ -192,7 +192,7 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack spec schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/schema/upstreams.py b/lib/spack/spack/schema/upstreams.py
index 7d721332eb..5075059d58 100644
--- a/lib/spack/spack/schema/upstreams.py
+++ b/lib/spack/spack/schema/upstreams.py
@@ -31,7 +31,7 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/schema#',
+ '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack core configuration file schema',
'type': 'object',
'additionalProperties': False,
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index eaf7544e83..8523dd6838 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -1864,6 +1864,15 @@ class Spec(object):
return sjson.dump(self.to_dict(hash), stream)
@staticmethod
+ def from_specfile(path):
+ """Construct a spec from a JSON or YAML spec file path"""
+ with open(path, 'r') as fd:
+ file_content = fd.read()
+ if path.endswith('.json'):
+ return Spec.from_json(file_content)
+ return Spec.from_yaml(file_content)
+
+ @staticmethod
def from_node_dict(node):
spec = Spec()
if 'name' in node.keys():
@@ -3921,7 +3930,7 @@ class Spec(object):
elif 'version' in parts:
col = '@'
- # Finally, write the ouptut
+ # Finally, write the output
write(sig + morph(spec, str(current)), col)
attribute = ''
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index bffd06ab73..ceff320d6e 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -437,11 +437,20 @@ class Stage(object):
# Join URLs of mirror roots with mirror paths. Because
# urljoin() will strip everything past the final '/' in
# the root, so we add a '/' if it is not present.
- mirror_urls = []
+ mirror_urls = {}
for mirror in spack.mirror.MirrorCollection().values():
for rel_path in self.mirror_paths:
- mirror_urls.append(
- url_util.join(mirror.fetch_url, rel_path))
+ mirror_url = url_util.join(mirror.fetch_url, rel_path)
+ mirror_urls[mirror_url] = {}
+ if mirror.get_access_pair("fetch") or \
+ mirror.get_access_token("fetch") or \
+ mirror.get_profile("fetch"):
+ mirror_urls[mirror_url] = {
+ "access_token": mirror.get_access_token("fetch"),
+ "access_pair": mirror.get_access_pair("fetch"),
+ "access_profile": mirror.get_profile("fetch"),
+ "endpoint_url": mirror.get_endpoint_url("fetch")
+ }
# If this archive is normally fetched from a tarball URL,
# then use the same digest. `spack mirror` ensures that
@@ -460,10 +469,11 @@ class Stage(object):
# Add URL strategies for all the mirrors with the digest
# Insert fetchers in the order that the URLs are provided.
- for url in reversed(mirror_urls):
+ for url in reversed(list(mirror_urls.keys())):
fetchers.insert(
0, fs.from_url_scheme(
- url, digest, expand=expand, extension=extension))
+ url, digest, expand=expand, extension=extension,
+ connection=mirror_urls[url]))
if self.default_fetcher.cachable:
for rel_path in reversed(list(self.mirror_paths)):
diff --git a/lib/spack/spack/store.py b/lib/spack/spack/store.py
index b0a418c7e3..1b372b848c 100644
--- a/lib/spack/spack/store.py
+++ b/lib/spack/spack/store.py
@@ -29,6 +29,7 @@ import llnl.util.tty as tty
import spack.config
import spack.database
import spack.directory_layout
+import spack.error
import spack.paths
import spack.util.path
@@ -284,6 +285,75 @@ def _construct_upstream_dbs_from_install_roots(
return accumulated_upstream_dbs
+def find(constraints, multiple=False, query_fn=None, **kwargs):
+ """Return a list of specs matching the constraints passed as inputs.
+
+ At least one spec per constraint must match, otherwise the function
+ will error with an appropriate message.
+
+ By default, this function queries the current store, but a custom query
+ function can be passed to hit any other source of concretized specs
+ (e.g. a binary cache).
+
+ The query function must accept a spec as its first argument.
+
+ Args:
+ constraints (List[spack.spec.Spec]): specs to be matched against
+ installed packages
+ multiple (bool): if True multiple matches per constraint are admitted
+ query_fn (Callable): query function to get matching specs. By default,
+ ``spack.store.db.query``
+ **kwargs: keyword arguments forwarded to the query function
+
+ Return:
+ List of matching specs
+ """
+ # Normalize input to list of specs
+ if isinstance(constraints, six.string_types):
+ constraints = [spack.spec.Spec(constraints)]
+
+ matching_specs, errors = [], []
+ query_fn = query_fn or spack.store.db.query
+ for spec in constraints:
+ current_matches = query_fn(spec, **kwargs)
+
+ # For each spec provided, make sure it refers to only one package.
+ if not multiple and len(current_matches) > 1:
+ msg_fmt = '"{0}" matches multiple packages: [{1}]'
+ errors.append(
+ msg_fmt.format(spec, ', '.join([m.format() for m in current_matches]))
+ )
+
+ # No installed package matches the query
+ if len(current_matches) == 0 and spec is not any:
+ msg_fmt = '"{0}" does not match any installed packages'
+ errors.append(msg_fmt.format(spec))
+
+ matching_specs.extend(current_matches)
+
+ if errors:
+ raise MatchError(
+ message="errors occurred when looking for specs in the store",
+ long_message='\n'.join(errors)
+ )
+
+ return matching_specs
+
+
+def specfile_matches(filename, **kwargs):
+ """Same as find but reads the query from a spec file.
+
+ Args:
+ filename (str): YAML or JSON file from which to read the query.
+ **kwargs: keyword arguments forwarded to "find"
+
+ Return:
+ List of matches
+ """
+ query = [spack.spec.Spec.from_specfile(filename)]
+ return spack.store.find(query, **kwargs)
+
+
@contextlib.contextmanager
def use_store(store_or_path):
"""Use the store passed as argument within the context manager.
@@ -314,3 +384,7 @@ def use_store(store_or_path):
store = original_store
db, layout = original_store.db, original_store.layout
root, unpadded_root = original_store.root, original_store.unpadded_root
+
+
+class MatchError(spack.error.SpackError):
+ """Error occurring when trying to match specs in store against a constraint"""
diff --git a/lib/spack/spack/test/bootstrap.py b/lib/spack/spack/test/bootstrap.py
index 99c1a61fd3..9ae4c85c6a 100644
--- a/lib/spack/spack/test/bootstrap.py
+++ b/lib/spack/spack/test/bootstrap.py
@@ -140,3 +140,30 @@ spack:
with spack.bootstrap.ensure_bootstrap_configuration():
pass
assert str(spack.store.root) == '/tmp/store'
+
+
+def test_nested_use_of_context_manager(mutable_config):
+ """Test nested use of the context manager"""
+ user_config = spack.config.config
+ with spack.bootstrap.ensure_bootstrap_configuration():
+ assert spack.config.config != user_config
+ with spack.bootstrap.ensure_bootstrap_configuration():
+ assert spack.config.config != user_config
+ assert spack.config.config == user_config
+
+
+@pytest.mark.parametrize('expected_missing', [False, True])
+def test_status_function_find_files(
+ mutable_config, mock_executable, tmpdir, monkeypatch, expected_missing
+):
+ if not expected_missing:
+ mock_executable('foo', 'echo Hello WWorld!')
+
+ monkeypatch.setattr(
+ spack.bootstrap, '_optional_requirements',
+ lambda: [spack.bootstrap._required_system_executable('foo', 'NOT FOUND')]
+ )
+ monkeypatch.setenv('PATH', str(tmpdir.join('bin')))
+
+ _, missing = spack.bootstrap.status_message('optional')
+ assert missing is expected_missing
diff --git a/lib/spack/spack/test/build_distribution.py b/lib/spack/spack/test/build_distribution.py
index 1514b3e903..042e7fe9f9 100644
--- a/lib/spack/spack/test/build_distribution.py
+++ b/lib/spack/spack/test/build_distribution.py
@@ -22,13 +22,12 @@ def test_build_tarball_overwrite(
install(str(spec))
# Runs fine the first time, throws the second time
- spack.binary_distribution.build_tarball(spec, '.', unsigned=True)
+ spack.binary_distribution._build_tarball(spec, '.', unsigned=True)
with pytest.raises(spack.binary_distribution.NoOverwriteException):
- spack.binary_distribution.build_tarball(spec, '.', unsigned=True)
+ spack.binary_distribution._build_tarball(spec, '.', unsigned=True)
# Should work fine with force=True
- spack.binary_distribution.build_tarball(
- spec, '.', force=True, unsigned=True)
+ spack.binary_distribution._build_tarball(spec, '.', force=True, unsigned=True)
# Remove the tarball and try again.
# This must *also* throw, because of the existing .spec.json file
@@ -38,4 +37,4 @@ def test_build_tarball_overwrite(
spack.binary_distribution.tarball_name(spec, '.spack')))
with pytest.raises(spack.binary_distribution.NoOverwriteException):
- spack.binary_distribution.build_tarball(spec, '.', unsigned=True)
+ spack.binary_distribution._build_tarball(spec, '.', unsigned=True)
diff --git a/lib/spack/spack/test/build_systems.py b/lib/spack/spack/test/build_systems.py
index b9105538b2..fec337eb8f 100644
--- a/lib/spack/spack/test/build_systems.py
+++ b/lib/spack/spack/test/build_systems.py
@@ -429,3 +429,19 @@ class TestXorgPackage(object):
assert pkg.urls[0] == 'https://www.x.org/archive/individual/' \
'util/util-macros-1.19.1.tar.bz2'
+
+
+def test_cmake_define_from_variant_conditional(config, mock_packages):
+ """Test that define_from_variant returns empty string when a condition on a variant
+ is not met. When this is the case, the variant is not set in the spec."""
+ s = Spec('cmake-conditional-variants-test').concretized()
+ assert 'example' not in s.variants
+ assert s.package.define_from_variant('EXAMPLE', 'example') == ''
+
+
+def test_autotools_args_from_conditional_variant(config, mock_packages):
+ """Test that _activate_or_not returns an empty string when a condition on a variant
+ is not met. When this is the case, the variant is not set in the spec."""
+ s = Spec('autotools-conditional-variants-test').concretized()
+ assert 'example' not in s.variants
+ assert len(s.package._activate_or_not('example', 'enable', 'disable')) == 0
diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py
index 5381782480..f9cbe9fe17 100644
--- a/lib/spack/spack/test/cc.py
+++ b/lib/spack/spack/test/cc.py
@@ -102,7 +102,7 @@ common_compile_args = (
)
-@pytest.fixture(scope='session')
+@pytest.fixture(scope='function')
def wrapper_environment():
with set_env(
SPACK_CC=real_cc,
diff --git a/lib/spack/spack/test/cmd/audit.py b/lib/spack/spack/test/cmd/audit.py
index 2c9dc6b124..e7fe2d68da 100644
--- a/lib/spack/spack/test/cmd/audit.py
+++ b/lib/spack/spack/test/cmd/audit.py
@@ -41,7 +41,7 @@ def test_audit_packages_https(mutable_config, mock_packages):
assert audit.returncode == 1
# This uses http and should fail
- audit('packages-https', "preferred-test", fail_on_error=False)
+ audit('packages-https', "test-dependency", fail_on_error=False)
assert audit.returncode == 1
# providing one or more package names with https should work
diff --git a/lib/spack/spack/test/cmd/build_env.py b/lib/spack/spack/test/cmd/build_env.py
index d3d941f0b7..d7458e0d3c 100644
--- a/lib/spack/spack/test/cmd/build_env.py
+++ b/lib/spack/spack/test/cmd/build_env.py
@@ -15,12 +15,12 @@ build_env = SpackCommand('build-env')
('zlib',),
('zlib', '--')
])
-@pytest.mark.usefixtures('config')
+@pytest.mark.usefixtures('config', 'mock_packages', 'working_env')
def test_it_just_runs(pkg):
build_env(*pkg)
-@pytest.mark.usefixtures('config')
+@pytest.mark.usefixtures('config', 'mock_packages', 'working_env')
def test_error_when_multiple_specs_are_given():
output = build_env('libelf libdwarf', fail_on_error=False)
assert 'only takes one spec' in output
@@ -31,7 +31,7 @@ def test_error_when_multiple_specs_are_given():
('--',),
(),
])
-@pytest.mark.usefixtures('config')
+@pytest.mark.usefixtures('config', 'mock_packages', 'working_env')
def test_build_env_requires_a_spec(args):
output = build_env(*args, fail_on_error=False)
assert 'requires a spec' in output
@@ -40,7 +40,7 @@ def test_build_env_requires_a_spec(args):
_out_file = 'env.out'
-@pytest.mark.usefixtures('config')
+@pytest.mark.usefixtures('config', 'mock_packages', 'working_env')
def test_dump(tmpdir):
with tmpdir.as_cwd():
build_env('--dump', _out_file, 'zlib')
@@ -48,7 +48,7 @@ def test_dump(tmpdir):
assert(any(line.startswith('PATH=') for line in f.readlines()))
-@pytest.mark.usefixtures('config')
+@pytest.mark.usefixtures('config', 'mock_packages', 'working_env')
def test_pickle(tmpdir):
with tmpdir.as_cwd():
build_env('--pickle', _out_file, 'zlib')
diff --git a/lib/spack/spack/test/cmd/checksum.py b/lib/spack/spack/test/cmd/checksum.py
index 6c05b03d59..c741377ce6 100644
--- a/lib/spack/spack/test/cmd/checksum.py
+++ b/lib/spack/spack/test/cmd/checksum.py
@@ -30,7 +30,7 @@ def test_checksum_args(arguments, expected):
@pytest.mark.parametrize('arguments,expected', [
- (['--batch', 'preferred-test'], 'versions of preferred-test'),
+ (['--batch', 'preferred-test'], 'version of preferred-test'),
(['--latest', 'preferred-test'], 'Found 1 version'),
(['--preferred', 'preferred-test'], 'Found 1 version'),
])
@@ -47,7 +47,7 @@ def test_checksum_interactive(
monkeypatch.setattr(tty, 'get_number', _get_number)
output = spack_checksum('preferred-test')
- assert 'versions of preferred-test' in output
+ assert 'version of preferred-test' in output
assert 'version(' in output
diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py
index c17ab1b4d5..b9bf02adb2 100644
--- a/lib/spack/spack/test/cmd/ci.py
+++ b/lib/spack/spack/test/cmd/ci.py
@@ -12,8 +12,8 @@ import pytest
from jsonschema import ValidationError, validate
import spack
+import spack.binary_distribution
import spack.ci as ci
-import spack.cmd.buildcache as buildcache
import spack.compilers as compilers
import spack.config
import spack.environment as ev
@@ -897,11 +897,11 @@ spack:
set_env_var('SPACK_COMPILER_ACTION', 'NONE')
set_env_var('SPACK_REMOTE_MIRROR_URL', mirror_url)
- def fake_dl_method(spec, dest, require_cdashid, m_url=None):
+ def fake_dl_method(spec, *args, **kwargs):
print('fake download buildcache {0}'.format(spec.name))
monkeypatch.setattr(
- buildcache, 'download_buildcache_files', fake_dl_method)
+ spack.binary_distribution, 'download_single_spec', fake_dl_method)
ci_out = ci_cmd('rebuild', output=str)
@@ -970,8 +970,7 @@ spack:
install_cmd('--keep-stage', json_path)
# env, spec, json_path, mirror_url, build_id, sign_binaries
- ci.push_mirror_contents(
- env, concrete_spec, json_path, mirror_url, True)
+ ci.push_mirror_contents(env, json_path, mirror_url, True)
ci.write_cdashid_to_mirror('42', concrete_spec, mirror_url)
@@ -1063,23 +1062,20 @@ spack:
def test_push_mirror_contents_exceptions(monkeypatch, capsys):
- def faked(env, spec_file=None, packages=None, add_spec=True,
- add_deps=True, output_location=os.getcwd(),
- signing_key=None, force=False, make_relative=False,
- unsigned=False, allow_root=False, rebuild_index=False):
+ def failing_access(*args, **kwargs):
raise Exception('Error: Access Denied')
- import spack.cmd.buildcache as buildcache
- monkeypatch.setattr(buildcache, '_createtarball', faked)
+ monkeypatch.setattr(spack.ci, '_push_mirror_contents', failing_access)
+    # Input doesn't matter, as we are faking exceptional output
url = 'fakejunk'
- ci.push_mirror_contents(None, None, None, url, None)
+ ci.push_mirror_contents(None, None, url, None)
captured = capsys.readouterr()
std_out = captured[0]
expect_msg = 'Permission problem writing to {0}'.format(url)
- assert(expect_msg in std_out)
+ assert expect_msg in std_out
def test_ci_generate_override_runner_attrs(tmpdir, mutable_mock_env_path,
diff --git a/lib/spack/spack/test/cmd/dev_build.py b/lib/spack/spack/test/cmd/dev_build.py
index 765d4dc81b..4c59e300be 100644
--- a/lib/spack/spack/test/cmd/dev_build.py
+++ b/lib/spack/spack/test/cmd/dev_build.py
@@ -134,7 +134,7 @@ def mock_module_noop(*args):
def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch,
- install_mockery):
+ install_mockery, working_env):
monkeypatch.setattr(os, 'execvp', print_spack_cc)
monkeypatch.setattr(spack.build_environment, 'module', mock_module_noop)
diff --git a/lib/spack/spack/test/cmd/mirror.py b/lib/spack/spack/test/cmd/mirror.py
index 8716d59e05..4c0238deb2 100644
--- a/lib/spack/spack/test/cmd/mirror.py
+++ b/lib/spack/spack/test/cmd/mirror.py
@@ -155,7 +155,7 @@ def test_mirror_crud(tmp_scope, capsys):
# no-op
output = mirror('set-url', '--scope', tmp_scope,
'mirror', 'http://spack.io')
- assert 'Url already set' in output
+ assert 'No changes made' in output
output = mirror('set-url', '--scope', tmp_scope,
'--push', 'mirror', 's3://spack-public')
@@ -164,7 +164,32 @@ def test_mirror_crud(tmp_scope, capsys):
# no-op
output = mirror('set-url', '--scope', tmp_scope,
'--push', 'mirror', 's3://spack-public')
- assert 'Url already set' in output
+ assert 'No changes made' in output
+
+ output = mirror('remove', '--scope', tmp_scope, 'mirror')
+ assert 'Removed mirror' in output
+
+ # Test S3 connection info token
+ mirror('add', '--scope', tmp_scope,
+ '--s3-access-token', 'aaaaaazzzzz',
+ 'mirror', 's3://spack-public')
+
+ output = mirror('remove', '--scope', tmp_scope, 'mirror')
+ assert 'Removed mirror' in output
+
+ # Test S3 connection info id/key
+ mirror('add', '--scope', tmp_scope,
+ '--s3-access-key-id', 'foo', '--s3-access-key-secret', 'bar',
+ 'mirror', 's3://spack-public')
+
+ output = mirror('remove', '--scope', tmp_scope, 'mirror')
+ assert 'Removed mirror' in output
+
+ # Test S3 connection info with endpoint URL
+ mirror('add', '--scope', tmp_scope,
+ '--s3-access-token', 'aaaaaazzzzz',
+ '--s3-endpoint-url', 'http://localhost/',
+ 'mirror', 's3://spack-public')
output = mirror('remove', '--scope', tmp_scope, 'mirror')
assert 'Removed mirror' in output
diff --git a/lib/spack/spack/test/cmd/style.py b/lib/spack/spack/test/cmd/style.py
index 29cde14400..4e8b3b9784 100644
--- a/lib/spack/spack/test/cmd/style.py
+++ b/lib/spack/spack/test/cmd/style.py
@@ -41,7 +41,8 @@ pytestmark = pytest.mark.skipif(not has_develop_branch(),
# The style tools have requirements to use newer Python versions. We simplify by
# requiring Python 3.6 or higher to run spack style.
skip_old_python = pytest.mark.skipif(
- sys.version_info < (3, 6), reason='requires Python 3.6 or higher')
+ sys.version_info < (3, 6), reason='requires Python 3.6 or higher'
+)
@pytest.fixture(scope="function")
@@ -164,18 +165,6 @@ def test_style_is_package(tmpdir):
assert not spack.cmd.style.is_package("lib/spack/external/pytest.py")
-@skip_old_python
-def test_bad_bootstrap(monkeypatch):
- """Ensure we fail gracefully when we can't bootstrap spack style."""
- monkeypatch.setattr(spack.cmd.style, "tool_order", [
- ("isort", "py-isort@4.3:4.0"), # bad spec to force concretization failure
- ])
- # zero out path to ensure we don't find isort
- with pytest.raises(spack.error.SpackError) as e:
- style(env={"PATH": ""})
- assert "Couldn't bootstrap isort" in str(e)
-
-
@pytest.fixture
def external_style_root(flake8_package_with_errors, tmpdir):
"""Create a mock git repository for running spack style."""
diff --git a/lib/spack/spack/test/cmd/test.py b/lib/spack/spack/test/cmd/test.py
index 9ee117b281..f82f767bc8 100644
--- a/lib/spack/spack/test/cmd/test.py
+++ b/lib/spack/spack/test/cmd/test.py
@@ -11,6 +11,7 @@ import pytest
import spack.cmd.install
import spack.config
import spack.package
+import spack.store
from spack.cmd.test import has_test_method
from spack.main import SpackCommand
@@ -231,3 +232,31 @@ def test_has_test_method_fails(capsys):
captured = capsys.readouterr()[1]
assert 'is not a class' in captured
+
+
+def test_hash_change(mock_test_stage, mock_packages, mock_archive, mock_fetch,
+ install_mockery_mutable_config):
+    """Ensure test results and stage contents can still be found and read after a package's hash changes."""
+ install('printing-package')
+ spack_test('run', '--alias', 'printpkg', 'printing-package')
+
+ stage_files = os.listdir(mock_test_stage)
+
+ # Grab test stage directory contents
+ testdir = os.path.join(mock_test_stage, stage_files[0])
+
+ outfile = os.path.join(testdir, 'test_suite.lock')
+ with open(outfile, 'r') as f:
+ output = f.read()
+ changed_hash = output.replace(
+ spack.store.db.query('printing-package')[0].full_hash(),
+ 'fakehash492ucwhwvzhxfbmcc45x49ha')
+ with open(outfile, 'w') as f:
+ f.write(changed_hash)
+
+ # The find command should show the contents
+ find_output = spack_test('find')
+ assert 'printpkg' in find_output
+ # The results should be obtainable
+ results_output = spack_test('results')
+ assert 'PASSED' in results_output
diff --git a/lib/spack/spack/test/cmd/unit_test.py b/lib/spack/spack/test/cmd/unit_test.py
index 1a273ff244..9811bdbfbd 100644
--- a/lib/spack/spack/test/cmd/unit_test.py
+++ b/lib/spack/spack/test/cmd/unit_test.py
@@ -22,7 +22,10 @@ def test_list_with_pytest_arg():
def test_list_with_keywords():
- output = spack_test('--list', '-k', 'cmd/unit_test.py')
+ # Here we removed querying with a "/" to separate directories
+ # since the behavior is inconsistent across different pytest
+ # versions, see https://stackoverflow.com/a/48814787/771663
+ output = spack_test('--list', '-k', 'unit_test.py')
assert output.strip() == cmd_test_py
diff --git a/lib/spack/spack/test/cmd/url.py b/lib/spack/spack/test/cmd/url.py
index f9179720b1..249ea6ec57 100644
--- a/lib/spack/spack/test/cmd/url.py
+++ b/lib/spack/spack/test/cmd/url.py
@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import re
-import sys
import pytest
@@ -69,14 +68,6 @@ def test_url_with_no_version_fails():
url('parse', 'http://www.netlib.org/voronoi/triangle.zip')
-skip_python_26 = pytest.mark.skipif(
- sys.version_info < (2, 7),
- reason="Python 2.6 tests are run in a container, where "
- "networking is super slow"
-)
-
-
-@skip_python_26
def test_url_list(mock_packages):
out = url('list')
total_urls = len(out.split('\n'))
@@ -106,7 +97,6 @@ def test_url_list(mock_packages):
assert 0 < correct_version_urls < total_urls
-@skip_python_26
def test_url_summary(mock_packages):
"""Test the URL summary command."""
# test url_summary, the internal function that does the work
@@ -133,7 +123,6 @@ def test_url_summary(mock_packages):
assert out_correct_versions == correct_versions
-@skip_python_26
def test_url_stats(capfd, mock_packages):
with capfd.disabled():
output = url('stats')
diff --git a/lib/spack/spack/test/compilers/detection.py b/lib/spack/spack/test/compilers/detection.py
index 8bc3285d43..6eaced6b6d 100644
--- a/lib/spack/spack/test/compilers/detection.py
+++ b/lib/spack/spack/test/compilers/detection.py
@@ -204,6 +204,11 @@ def test_intel_version_detection(version_str, expected_version):
'Copyright (C) 1985-2021 Intel Corporation. All rights reserved.',
'2021.4.0'
),
+ ( # IFX
+ 'ifx (IFORT) 2022.0.0 20211123\n'
+ 'Copyright (C) 1985-2021 Intel Corporation. All rights reserved.',
+ '2022.0.0'
+ ),
])
def test_oneapi_version_detection(version_str, expected_version):
version = spack.compilers.oneapi.Oneapi.extract_version_from_output(
diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py
index 733e3efa77..80d9b0eb94 100644
--- a/lib/spack/spack/test/concretize_preferences.py
+++ b/lib/spack/spack/test/concretize_preferences.py
@@ -176,16 +176,16 @@ class TestConcretizePreferences(object):
def test_preferred(self):
""""Test packages with some version marked as preferred=True"""
- spec = Spec('preferred-test')
+ spec = Spec('python')
spec.concretize()
- assert spec.version == Version('0.2.15')
+ assert spec.version == Version('2.7.11')
# now add packages.yaml with versions other than preferred
# ensure that once config is in place, non-preferred version is used
- update_packages('preferred-test', 'version', ['0.2.16'])
- spec = Spec('preferred-test')
+ update_packages('python', 'version', ['3.5.0'])
+ spec = Spec('python')
spec.concretize()
- assert spec.version == Version('0.2.16')
+ assert spec.version == Version('3.5.0')
def test_develop(self):
"""Test concretization with develop-like versions"""
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index 9a6dc0b13b..d502cf1db1 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -430,8 +430,14 @@ def _skip_if_missing_executables(request):
"""Permits to mark tests with 'require_executables' and skip the
tests if the executables passed as arguments are not found.
"""
- if request.node.get_marker('requires_executables'):
- required_execs = request.node.get_marker('requires_executables').args
+ if hasattr(request.node, 'get_marker'):
+ # TODO: Remove the deprecated API as soon as we drop support for Python 2.6
+ marker = request.node.get_marker('requires_executables')
+ else:
+ marker = request.node.get_closest_marker('requires_executables')
+
+ if marker:
+ required_execs = marker.args
missing_execs = [
x for x in required_execs if spack.util.executable.which(x) is None
]
@@ -1453,7 +1459,7 @@ def invalid_spec(request):
return request.param
-@pytest.fixture("module")
+@pytest.fixture(scope='module')
def mock_test_repo(tmpdir_factory):
"""Create an empty repository."""
repo_namespace = 'mock_test_repo'
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index 57a03a5db9..925acc83d3 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -909,3 +909,18 @@ def test_database_works_with_empty_dir(tmpdir):
db.query()
# Check that reading an empty directory didn't create a new index.json
assert not os.path.exists(db._index_path)
+
+
+@pytest.mark.parametrize('query_arg,exc_type,msg_str', [
+ (['callpath'], spack.store.MatchError, 'matches multiple packages'),
+ (['tensorflow'], spack.store.MatchError, 'does not match any')
+])
+def test_store_find_failures(database, query_arg, exc_type, msg_str):
+ with pytest.raises(exc_type) as exc_info:
+ spack.store.find(query_arg, multiple=False)
+ assert msg_str in str(exc_info.value)
+
+
+def test_store_find_accept_string(database):
+ result = spack.store.find('callpath', multiple=True)
+ assert len(result) == 3
diff --git a/lib/spack/spack/test/directives.py b/lib/spack/spack/test/directives.py
index 31038ebb47..ad7c98bdb9 100644
--- a/lib/spack/spack/test/directives.py
+++ b/lib/spack/spack/test/directives.py
@@ -51,3 +51,12 @@ def test_constraints_from_context_are_merged(mock_packages):
assert pkg_cls.dependencies
assert spack.spec.Spec('@0.14:15 ^b@3.8:4.0') in pkg_cls.dependencies['c']
+
+
+@pytest.mark.regression('27754')
+def test_extends_spec(config, mock_packages):
+ extender = spack.spec.Spec('extends-spec').concretized()
+ extendee = spack.spec.Spec('extendee').concretized()
+
+ assert extender.dependencies
+ assert extender.package.extends(extendee)
diff --git a/lib/spack/spack/test/llnl/util/tty/log.py b/lib/spack/spack/test/llnl/util/tty/log.py
index 78d4e24fa4..c3cb96da8a 100644
--- a/lib/spack/spack/test/llnl/util/tty/log.py
+++ b/lib/spack/spack/test/llnl/util/tty/log.py
@@ -62,6 +62,24 @@ def test_log_python_output_without_echo(capfd, tmpdir):
assert capfd.readouterr()[0] == ''
+def test_log_python_output_with_invalid_utf8(capfd, tmpdir):
+ with tmpdir.as_cwd():
+ with log_output('foo.txt'):
+ sys.stdout.buffer.write(b'\xc3\x28\n')
+
+ # python2 and 3 treat invalid UTF-8 differently
+ if sys.version_info.major == 2:
+ expected = b'\xc3(\n'
+ else:
+ expected = b'<line lost: output was not encoded as UTF-8>\n'
+ with open('foo.txt', 'rb') as f:
+ written = f.read()
+ assert written == expected
+
+ # nothing on stdout or stderr
+ assert capfd.readouterr()[0] == ''
+
+
def test_log_python_output_and_echo_output(capfd, tmpdir):
with tmpdir.as_cwd():
# echo two lines
@@ -429,10 +447,6 @@ def mock_shell_v_v_no_termios(proc, ctl, **kwargs):
(mock_shell_v_v, nullcontext),
(mock_shell_v_v_no_termios, no_termios),
])
-@pytest.mark.skipif(
- sys.version_info < (2, 7),
- reason="Python 2.6 tests are run in a container, where this fails often"
-)
def test_foreground_background_output(
test_fn, capfd, termios_on_or_off, tmpdir):
"""Tests hitting 'v' toggles output, and that force_echo works."""
diff --git a/lib/spack/spack/test/monitor.py b/lib/spack/spack/test/monitor.py
index b060c725ee..e5888231e5 100644
--- a/lib/spack/spack/test/monitor.py
+++ b/lib/spack/spack/test/monitor.py
@@ -18,18 +18,13 @@ from spack.monitor import SpackMonitorClient
install = SpackCommand('install')
-def get_client(host, prefix="ms1", disable_auth=False, allow_fail=False, tags=None,
- save_local=False):
+def get_client(host, prefix="ms1", allow_fail=False, tags=None, save_local=False):
"""
We replicate this function to not generate a global client.
"""
cli = SpackMonitorClient(host=host, prefix=prefix, allow_fail=allow_fail,
tags=tags, save_local=save_local)
- # If we don't disable auth, environment credentials are required
- if not disable_auth and not save_local:
- cli.require_auth()
-
# We will exit early if the monitoring service is not running, but
# only if we aren't doing a local save
if not save_local:
@@ -131,20 +126,17 @@ def mock_monitor_request(monkeypatch):
def test_spack_monitor_auth(mock_monitor_request):
- with pytest.raises(SystemExit):
- get_client(host="http://127.0.0.1")
-
os.environ["SPACKMON_TOKEN"] = "xxxxxxxxxxxxxxxxx"
os.environ["SPACKMON_USER"] = "spackuser"
get_client(host="http://127.0.0.1")
def test_spack_monitor_without_auth(mock_monitor_request):
- get_client(host="hostname", disable_auth=True)
+ get_client(host="hostname")
def test_spack_monitor_build_env(mock_monitor_request, install_mockery_mutable_config):
- monitor = get_client(host="hostname", disable_auth=True)
+ monitor = get_client(host="hostname")
assert hasattr(monitor, "build_environment")
for key in ["host_os", "platform", "host_target", "hostname", "spack_version",
"kernel_version"]:
@@ -157,7 +149,7 @@ def test_spack_monitor_build_env(mock_monitor_request, install_mockery_mutable_c
def test_spack_monitor_basic_auth(mock_monitor_request):
- monitor = get_client(host="hostname", disable_auth=True)
+ monitor = get_client(host="hostname")
# Headers should be empty
assert not monitor.headers
@@ -167,7 +159,7 @@ def test_spack_monitor_basic_auth(mock_monitor_request):
def test_spack_monitor_new_configuration(mock_monitor_request, install_mockery):
- monitor = get_client(host="hostname", disable_auth=True)
+ monitor = get_client(host="hostname")
spec = spack.spec.Spec("dttop")
spec.concretize()
response = monitor.new_configuration([spec])
@@ -178,7 +170,7 @@ def test_spack_monitor_new_configuration(mock_monitor_request, install_mockery):
def test_spack_monitor_new_build(mock_monitor_request, install_mockery_mutable_config,
install_mockery):
- monitor = get_client(host="hostname", disable_auth=True)
+ monitor = get_client(host="hostname")
spec = spack.spec.Spec("dttop")
spec.concretize()
response = monitor.new_build(spec)
@@ -190,7 +182,7 @@ def test_spack_monitor_new_build(mock_monitor_request, install_mockery_mutable_c
def test_spack_monitor_update_build(mock_monitor_request, install_mockery,
install_mockery_mutable_config):
- monitor = get_client(host="hostname", disable_auth=True)
+ monitor = get_client(host="hostname")
spec = spack.spec.Spec("dttop")
spec.concretize()
response = monitor.update_build(spec, status="SUCCESS")
@@ -200,7 +192,7 @@ def test_spack_monitor_update_build(mock_monitor_request, install_mockery,
def test_spack_monitor_fail_task(mock_monitor_request, install_mockery,
install_mockery_mutable_config):
- monitor = get_client(host="hostname", disable_auth=True)
+ monitor = get_client(host="hostname")
spec = spack.spec.Spec("dttop")
spec.concretize()
response = monitor.fail_task(spec)
@@ -215,7 +207,7 @@ def test_spack_monitor_send_analyze_metadata(monkeypatch, mock_monitor_request,
def buildid(*args, **kwargs):
return 1
monkeypatch.setattr(spack.monitor.SpackMonitorClient, "get_build_id", buildid)
- monitor = get_client(host="hostname", disable_auth=True)
+ monitor = get_client(host="hostname")
spec = spack.spec.Spec("dttop")
spec.concretize()
response = monitor.send_analyze_metadata(spec.package, metadata={"boop": "beep"})
@@ -226,7 +218,7 @@ def test_spack_monitor_send_analyze_metadata(monkeypatch, mock_monitor_request,
def test_spack_monitor_send_phase(mock_monitor_request, install_mockery,
install_mockery_mutable_config):
- monitor = get_client(host="hostname", disable_auth=True)
+ monitor = get_client(host="hostname")
def get_build_id(*args, **kwargs):
return 1
diff --git a/lib/spack/spack/test/relocate.py b/lib/spack/spack/test/relocate.py
index 1e4b771fa6..61a605c976 100644
--- a/lib/spack/spack/test/relocate.py
+++ b/lib/spack/spack/test/relocate.py
@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import collections
import os.path
import re
import shutil
@@ -73,47 +72,6 @@ def source_file(tmpdir, is_relocatable):
return src
-@pytest.fixture(params=['which_found', 'installed', 'to_be_installed'])
-def expected_patchelf_path(request, mutable_database, monkeypatch):
- """Prepare the stage to tests different cases that can occur
- when searching for patchelf.
- """
- case = request.param
-
- # Mock the which function
- which_fn = {
- 'which_found': lambda x: collections.namedtuple(
- '_', ['path']
- )('/usr/bin/patchelf')
- }
- monkeypatch.setattr(
- spack.util.executable, 'which',
- which_fn.setdefault(case, lambda x: None)
- )
- if case == 'which_found':
- return '/usr/bin/patchelf'
-
- # TODO: Mock a case for Darwin architecture
-
- spec = spack.spec.Spec('patchelf')
- spec.concretize()
-
- patchelf_cls = type(spec.package)
- do_install = patchelf_cls.do_install
- expected_path = os.path.join(spec.prefix.bin, 'patchelf')
-
- def do_install_mock(self, **kwargs):
- do_install(self, fake=True)
- with open(expected_path):
- pass
-
- monkeypatch.setattr(patchelf_cls, 'do_install', do_install_mock)
- if case == 'installed':
- spec.package.do_install()
-
- return expected_path
-
-
@pytest.fixture()
def mock_patchelf(tmpdir, mock_executable):
def _factory(output):
@@ -227,6 +185,7 @@ def copy_binary():
@pytest.mark.requires_executables(
'/usr/bin/gcc', 'patchelf', 'strings', 'file'
)
+@skip_unless_linux
def test_file_is_relocatable(source_file, is_relocatable):
compiler = spack.util.executable.Executable('/usr/bin/gcc')
executable = str(source_file).replace('.c', '.x')
@@ -240,8 +199,9 @@ def test_file_is_relocatable(source_file, is_relocatable):
@pytest.mark.requires_executables('patchelf', 'strings', 'file')
+@skip_unless_linux
def test_patchelf_is_relocatable():
- patchelf = spack.relocate._patchelf()
+ patchelf = os.path.realpath(spack.relocate._patchelf())
assert llnl.util.filesystem.is_exe(patchelf)
assert spack.relocate.file_is_relocatable(patchelf)
@@ -263,12 +223,6 @@ def test_file_is_relocatable_errors(tmpdir):
assert 'is not an absolute path' in str(exc_info.value)
-@skip_unless_linux
-def test_search_patchelf(expected_patchelf_path):
- current = spack.relocate._patchelf()
- assert current == expected_patchelf_path
-
-
@pytest.mark.parametrize('patchelf_behavior,expected', [
('echo ', []),
('echo /opt/foo/lib:/opt/foo/lib64', ['/opt/foo/lib', '/opt/foo/lib64']),
diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py
index b337486e7f..0c6292af9d 100644
--- a/lib/spack/spack/test/stage.py
+++ b/lib/spack/spack/test/stage.py
@@ -10,7 +10,6 @@ import getpass
import os
import shutil
import stat
-import tempfile
import pytest
@@ -825,29 +824,6 @@ class TestStage(object):
assert os.path.exists(test_path)
shutil.rmtree(test_path)
- def test_get_stage_root_in_spack(self, clear_stage_root):
- """Ensure an instance path is an accessible build stage path."""
- base = canonicalize_path(os.path.join('$spack', '.spack-test-stage'))
- mkdirp(base)
- test_path = tempfile.mkdtemp(dir=base)
-
- try:
- with spack.config.override('config:build_stage', test_path):
- path = spack.stage.get_stage_root()
-
- assert 'spack' in path.split(os.path.sep)
-
- # Make sure cached stage path value was changed appropriately
- assert spack.stage._stage_root in (
- test_path, os.path.join(test_path, getpass.getuser()))
-
- # Make sure the directory exists
- assert os.path.isdir(spack.stage._stage_root)
-
- finally:
- # Clean up regardless of outcome
- shutil.rmtree(base)
-
def test_stage_constructor_no_fetcher(self):
"""Ensure Stage constructor with no URL or fetch strategy fails."""
with pytest.raises(ValueError):
diff --git a/lib/spack/spack/test/web.py b/lib/spack/spack/test/web.py
index 2ccbf51225..41aa1e6121 100644
--- a/lib/spack/spack/test/web.py
+++ b/lib/spack/spack/test/web.py
@@ -2,9 +2,9 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import collections
import os
-import ordereddict_backport
import pytest
import llnl.util.tty as tty
@@ -152,7 +152,7 @@ def test_get_header():
# If lookup has to fallback to fuzzy matching and there are more than one
# fuzzy match, the result depends on the internal ordering of the given
# mapping
- headers = ordereddict_backport.OrderedDict()
+ headers = collections.OrderedDict()
headers['Content-type'] = 'text/plain'
headers['contentType'] = 'text/html'
@@ -161,7 +161,7 @@ def test_get_header():
assert(spack.util.web.get_header(headers, 'CONTENT_TYPE') == 'text/html')
# Same as above, but different ordering
- headers = ordereddict_backport.OrderedDict()
+ headers = collections.OrderedDict()
headers['contentType'] = 'text/html'
headers['Content-type'] = 'text/plain'
@@ -246,10 +246,33 @@ class MockS3Client(object):
raise self.ClientError
+def test_gather_s3_information(monkeypatch, capfd):
+ mock_connection_data = {"access_token": "AAAAAAA",
+ "profile": "SPacKDeV",
+ "access_pair": ("SPA", "CK"),
+ "endpoint_url": "https://127.0.0.1:8888"}
+
+ session_args, client_args = spack.util.s3.get_mirror_s3_connection_info(mock_connection_data) # noqa: E501
+
+ # Session args are used to create the S3 Session object
+ assert "aws_session_token" in session_args
+ assert session_args.get("aws_session_token") == "AAAAAAA"
+ assert "aws_access_key_id" in session_args
+ assert session_args.get("aws_access_key_id") == "SPA"
+ assert "aws_secret_access_key" in session_args
+ assert session_args.get("aws_secret_access_key") == "CK"
+ assert "profile_name" in session_args
+ assert session_args.get("profile_name") == "SPacKDeV"
+
+ # In addition to the session object, use the client_args to create the s3
+ # Client object
+ assert "endpoint_url" in client_args
+
+
def test_remove_s3_url(monkeypatch, capfd):
fake_s3_url = 's3://my-bucket/subdirectory/mirror'
- def mock_create_s3_session(url):
+ def mock_create_s3_session(url, connection={}):
return MockS3Client()
monkeypatch.setattr(
@@ -269,7 +292,7 @@ def test_remove_s3_url(monkeypatch, capfd):
def test_s3_url_exists(monkeypatch, capfd):
- def mock_create_s3_session(url):
+ def mock_create_s3_session(url, connection={}):
return MockS3Client()
monkeypatch.setattr(
spack.util.s3, 'create_s3_session', mock_create_s3_session)
diff --git a/lib/spack/spack/util/mock_package.py b/lib/spack/spack/util/mock_package.py
index d8b3ec468b..ab7b8439e5 100644
--- a/lib/spack/spack/util/mock_package.py
+++ b/lib/spack/spack/util/mock_package.py
@@ -2,10 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
"""Infrastructure used by tests for mocking packages and repos."""
-
-import ordereddict_backport
+import collections
import spack.provider_index
import spack.util.naming
@@ -149,7 +147,7 @@ class MockPackageMultiRepo(object):
MockPackage._repo = self
# set up dependencies
- MockPackage.dependencies = ordereddict_backport.OrderedDict()
+ MockPackage.dependencies = collections.OrderedDict()
for dep, dtype in zip(dependencies, dependency_types):
d = Dependency(MockPackage, Spec(dep.name), type=dtype)
if not conditions or dep.name not in conditions:
diff --git a/lib/spack/spack/util/s3.py b/lib/spack/spack/util/s3.py
index b9b56e0498..82d51eb72e 100644
--- a/lib/spack/spack/util/s3.py
+++ b/lib/spack/spack/util/s3.py
@@ -11,6 +11,15 @@ import spack
import spack.util.url as url_util
+def get_mirror_connection(url, url_type="push"):
+ connection = {}
+ # Try to find a mirror for potential connection information
+ for mirror in spack.mirror.MirrorCollection().values():
+ if "%s://%s" % (url.scheme, url.netloc) == mirror.push_url:
+ connection = mirror.to_dict()[url_type]
+ return connection
+
+
def _parse_s3_endpoint_url(endpoint_url):
if not urllib_parse.urlparse(endpoint_url, scheme='').scheme:
endpoint_url = '://'.join(('https', endpoint_url))
@@ -18,7 +27,31 @@ def _parse_s3_endpoint_url(endpoint_url):
return endpoint_url
-def create_s3_session(url):
+def get_mirror_s3_connection_info(connection):
+ s3_connection = {}
+
+ s3_connection_is_dict = connection and isinstance(connection, dict)
+ if s3_connection_is_dict:
+ if connection.get("access_token"):
+ s3_connection["aws_session_token"] = connection["access_token"]
+ if connection.get("access_pair"):
+ s3_connection["aws_access_key_id"] = connection["access_pair"][0]
+ s3_connection["aws_secret_access_key"] = connection["access_pair"][1]
+ if connection.get("profile"):
+ s3_connection["profile_name"] = connection["profile"]
+
+ s3_client_args = {"use_ssl": spack.config.get('config:verify_ssl')}
+
+ endpoint_url = os.environ.get('S3_ENDPOINT_URL')
+ if endpoint_url:
+ s3_client_args['endpoint_url'] = _parse_s3_endpoint_url(endpoint_url)
+ elif s3_connection_is_dict and connection.get("endpoint_url"):
+ s3_client_args["endpoint_url"] = _parse_s3_endpoint_url(connection["endpoint_url"]) # noqa: E501
+
+ return (s3_connection, s3_client_args)
+
+
+def create_s3_session(url, connection={}):
url = url_util.parse(url)
if url.scheme != 's3':
raise ValueError(
@@ -31,14 +64,9 @@ def create_s3_session(url):
from boto3 import Session
from botocore.exceptions import ClientError
- session = Session()
-
- s3_client_args = {"use_ssl": spack.config.get('config:verify_ssl')}
-
- endpoint_url = os.environ.get('S3_ENDPOINT_URL')
- if endpoint_url:
- s3_client_args['endpoint_url'] = _parse_s3_endpoint_url(endpoint_url)
+ s3_connection, s3_client_args = get_mirror_s3_connection_info(connection)
+ session = Session(**s3_connection)
# if no access credentials provided above, then access anonymously
if not session.get_credentials():
from botocore import UNSIGNED
diff --git a/lib/spack/spack/util/spack_yaml.py b/lib/spack/spack/util/spack_yaml.py
index a67b6491f8..9812943a79 100644
--- a/lib/spack/spack/util/spack_yaml.py
+++ b/lib/spack/spack/util/spack_yaml.py
@@ -12,13 +12,13 @@
default unorderd dict.
"""
+import collections
import ctypes
import re
import sys
from typing import List # novm
import ruamel.yaml as yaml
-from ordereddict_backport import OrderedDict
from ruamel.yaml import RoundTripDumper, RoundTripLoader
from six import StringIO, string_types
@@ -39,7 +39,7 @@ __all__ = ['load', 'dump', 'SpackYAMLError']
# Also, use OrderedDict instead of just dict.
-class syaml_dict(OrderedDict):
+class syaml_dict(collections.OrderedDict):
def __repr__(self):
mappings = ('%r: %r' % (k, v) for k, v in self.items())
return '{%s}' % ', '.join(mappings)
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index f1b01ae310..2db91b8080 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -193,7 +193,8 @@ def push_to_url(
while remote_path.startswith('/'):
remote_path = remote_path[1:]
- s3 = s3_util.create_s3_session(remote_url)
+ s3 = s3_util.create_s3_session(remote_url,
+ connection=s3_util.get_mirror_connection(remote_url)) # noqa: E501
s3.upload_file(local_file_path, remote_url.netloc,
remote_path, ExtraArgs=extra_args)
@@ -219,7 +220,9 @@ def url_exists(url):
return os.path.exists(local_path)
if url.scheme == 's3':
- s3 = s3_util.create_s3_session(url)
+ # Check for URL specific connection information
+ s3 = s3_util.create_s3_session(url, connection=s3_util.get_mirror_connection(url)) # noqa: E501
+
try:
s3.get_object(Bucket=url.netloc, Key=url.path.lstrip('/'))
return True
@@ -263,7 +266,8 @@ def remove_url(url, recursive=False):
return
if url.scheme == 's3':
- s3 = s3_util.create_s3_session(url)
+ # Try to find a mirror for potential connection information
+ s3 = s3_util.create_s3_session(url, connection=s3_util.get_mirror_connection(url)) # noqa: E501
bucket = url.netloc
if recursive:
# Because list_objects_v2 can only return up to 1000 items
diff --git a/share/spack/docker/amazonlinux-2.dockerfile b/share/spack/docker/amazonlinux-2.dockerfile
index d4066f76c4..93d67d9c4d 100644
--- a/share/spack/docker/amazonlinux-2.dockerfile
+++ b/share/spack/docker/amazonlinux-2.dockerfile
@@ -63,7 +63,6 @@ WORKDIR /root
SHELL ["docker-shell"]
# TODO: add a command to Spack that (re)creates the package cache
-RUN spack bootstrap untrust spack-install
RUN spack spec hdf5+mpi
ENTRYPOINT ["/bin/bash", "/opt/spack/share/spack/docker/entrypoint.bash"]
diff --git a/share/spack/docker/ubuntu-1604.dockerfile b/share/spack/docker/ubuntu-1604.dockerfile
index dd7fcb1910..f5662408ee 100644
--- a/share/spack/docker/ubuntu-1604.dockerfile
+++ b/share/spack/docker/ubuntu-1604.dockerfile
@@ -67,7 +67,6 @@ WORKDIR /root
SHELL ["docker-shell"]
# TODO: add a command to Spack that (re)creates the package cache
-RUN spack bootstrap untrust spack-install
RUN spack spec hdf5+mpi
ENTRYPOINT ["/bin/bash", "/opt/spack/share/spack/docker/entrypoint.bash"]
diff --git a/share/spack/docker/ubuntu-1804.dockerfile b/share/spack/docker/ubuntu-1804.dockerfile
index 280f0b9a7d..d491174e97 100644
--- a/share/spack/docker/ubuntu-1804.dockerfile
+++ b/share/spack/docker/ubuntu-1804.dockerfile
@@ -67,7 +67,6 @@ WORKDIR /root
SHELL ["docker-shell"]
# TODO: add a command to Spack that (re)creates the package cache
-RUN spack bootstrap untrust spack-install
RUN spack spec hdf5+mpi
ENTRYPOINT ["/bin/bash", "/opt/spack/share/spack/docker/entrypoint.bash"]
diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-on-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-on-power/spack.yaml
index f1f0b46393..26a594228d 100644
--- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-on-power/spack.yaml
+++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-on-power/spack.yaml
@@ -59,24 +59,33 @@ spack:
- cuda_specs:
- amrex +cuda cuda_arch=70
- caliper +cuda cuda_arch=70
- - chai +cuda ~benchmarks ~tests cuda_arch=70 ^umpire~shared+cuda
+ - chai ~benchmarks ~tests +cuda cuda_arch=70 ^umpire ~shared
- ginkgo +cuda cuda_arch=70
+ - heffte +cuda cuda_arch=70
- hpx +cuda cuda_arch=70
- - kokkos +cuda +wrapper cuda_arch=70
- - kokkos-kernels +cuda cuda_arch=70 ^kokkos +cuda +wrapper cuda_arch=70
- - magma cuda_arch=70
+ - hypre +cuda cuda_arch=70
+ - kokkos +wrapper +cuda cuda_arch=70
+ - kokkos-kernels +cuda cuda_arch=70 ^kokkos +wrapper +cuda cuda_arch=70
+ - magma +cuda cuda_arch=70
+ - mfem +cuda cuda_arch=70
+ - parsec +cuda cuda_arch=70
+ - petsc +cuda cuda_arch=70
- raja +cuda cuda_arch=70
- slate +cuda cuda_arch=70
- - strumpack +cuda ~slate cuda_arch=70
+ - slepc +cuda cuda_arch=70
+ - strumpack ~slate +cuda cuda_arch=70
- sundials +cuda cuda_arch=70
- superlu-dist +cuda cuda_arch=70
- tasmanian +cuda cuda_arch=70
- - umpire +cuda ~shared cuda_arch=70
+ - trilinos@13.2.0 +cuda cuda_arch=70
+ - umpire ~shared +cuda cuda_arch=70
+ - vtk-m +cuda cuda_arch=70
- zfp +cuda cuda_arch=70
- #- ascent +cuda ~shared cuda_arch=70
- #- axom +cuda cuda_arch=70 ^umpire~shared
- #- hypre +cuda cuda_arch=70
- #- mfem +cuda cuda_arch=70
+ #- ascent ~shared +cuda cuda_arch=70
+ #- axom +cuda cuda_arch=70 ^umpire ~shared
+ #- dealii +cuda cuda_arch=70 # gmsh
+ #- flecsi +cuda cuda_arch=70
+ #- paraview +cuda cuda_arch=70
- default_specs:
- adios
@@ -164,7 +173,7 @@ spack:
- swig@4.0.2-fortran
- sz
- tasmanian
- - tau
+ - tau +mpi +python
- trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
- turbine
- umap
diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml
index ab226f8e83..3e86a84c18 100644
--- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml
+++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml
@@ -59,24 +59,33 @@ spack:
- cuda_specs:
- amrex +cuda cuda_arch=70
- caliper +cuda cuda_arch=70
- - chai +cuda ~benchmarks ~tests cuda_arch=70 ^umpire~shared+cuda
+ - chai ~benchmarks ~tests +cuda cuda_arch=70 ^umpire ~shared
- ginkgo +cuda cuda_arch=70
+ - heffte +cuda cuda_arch=70
- hpx +cuda cuda_arch=70
- - kokkos +cuda +wrapper cuda_arch=70
- - kokkos-kernels +cuda cuda_arch=70 ^kokkos +cuda +wrapper cuda_arch=70
- - magma cuda_arch=70
+ - hypre +cuda cuda_arch=70
+ - kokkos +wrapper +cuda cuda_arch=70
+ - kokkos-kernels +cuda cuda_arch=70 ^kokkos +wrapper +cuda cuda_arch=70
+ - magma +cuda cuda_arch=70
+ - mfem +cuda cuda_arch=70
+ - parsec +cuda cuda_arch=70
+ - petsc +cuda cuda_arch=70
- raja +cuda cuda_arch=70
- slate +cuda cuda_arch=70
- - strumpack +cuda ~slate cuda_arch=70
+ - slepc +cuda cuda_arch=70
+ - strumpack ~slate +cuda cuda_arch=70
- sundials +cuda cuda_arch=70
- superlu-dist +cuda cuda_arch=70
- tasmanian +cuda cuda_arch=70
+ - trilinos@13.2.0 +cuda cuda_arch=70
+ - umpire ~shared +cuda cuda_arch=70
+ - vtk-m +cuda cuda_arch=70
- zfp +cuda cuda_arch=70
- #- ascent +cuda ~shared cuda_arch=70
- #- axom +cuda cuda_arch=70 ^umpire@4.1.2 ~shared
- #- hypre +cuda cuda_arch=70
- #- mfem +cuda cuda_arch=70
- #- umpire +cuda ~shared cuda_arch=70 # unsatisfiable concretization conflict w/ blt
+ #- ascent ~shared +cuda cuda_arch=70
+ #- axom +cuda cuda_arch=70 ^umpire ~shared
+ #- dealii +cuda cuda_arch=70 # gmsh
+ #- flecsi +cuda cuda_arch=70
+ #- paraview +cuda cuda_arch=70
- rocm_specs:
- kokkos +rocm amdgpu_target=gfx906
@@ -99,7 +108,7 @@ spack:
- archer
- argobots
- ascent
- - axom ^umpire@5.0.1 ^raja@0.13.0
+ - axom
- bolt
- cabana
- caliper
@@ -173,7 +182,7 @@ spack:
- swig@4.0.2-fortran
- sz
- tasmanian
- - tau
+ - tau +mpi +python
- trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
- turbine
- umap
diff --git a/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml
index f451ceeed3..e8ba238e06 100644
--- a/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml
+++ b/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml
@@ -24,7 +24,7 @@ spack:
# Note skipping spot since no spack package for it
- radiuss:
- ascent # ^conduit@0.6.0
- - axom ^umpire@5.0.1 ^raja@0.13.0
+ - axom
- blt
- caliper
#- care ## ~benchmarks ~examples ~tests
diff --git a/share/spack/keys/tutorial.pub b/share/spack/keys/tutorial.pub
index 57bcef38da..5ea7ea6981 100644
--- a/share/spack/keys/tutorial.pub
+++ b/share/spack/keys/tutorial.pub
@@ -1,29 +1,37 @@
-----BEGIN PGP PUBLIC KEY BLOCK-----
-mQINBFoGPeUBEACtR/y+HqInBMMmhoCIoaltG3PnOpRtuLIaiUWRgVCJ3ZbkSNDK
-n2SWgnzQOuh4TeHgr9YEOiPdN8DYNI0Cp+IY2v73cxdMEHPHtVAGJn6fzmEnmFal
-vTSUAgekYfrtJKTtURJ6l+Z1jaX1jqpKY+FTQvdyp08oJiEeDhL57Tx5s0QChvMK
-A+2Hdkiz/FeSVDakQaEil4CBB7oC+MQsUphQUUEVGaLd91iHMsPF78J2SXMJ2E/i
-xz+jcSLnyacaRyQxUBGMY0e4AFNq53GE6aC0bqso5q8JnZH9PP5IiH6XqeRiWCYb
-GqvhW322pDrC83aHHz7dzCafBvcdh0/TlN7L5Qr+zRGVU5X/EdEeSruO0OylzXJp
-RxPc1WW5uY0YMayPXtiESLs9ElczF34aXX5mLokvgcuroIJQY+oUoU4p4r+23gJw
-HsQCCr9vUt1j7SOHhiYhFlMHpTvpaXyPAq6kah/iGl9okA4V6QPI5upPrwLdVWMZ
-LQEYfGAuCfwZub0Mz2qZnvjiew0dCDImMLV8GOdJDN1Ui74OxesHHakdyqbOlSt7
-Z4XatINbnEM5VE1VBUlseOAHu/YTmsqnBu4w7//Ys5I4mFmKvVWgURRjtArE+Z05
-Voa0pD2wDb+5R7DkDqMLECJuxjD+X33vNHMOrA9qcef1WZ3CLtQHqs+abQARAQAB
-tDdzYy10dXRvcmlhbCAoR1BHIGNyZWF0ZWQgZm9yIFNwYWNrKSA8YmVja2VyMzNA
-bGxubC5nb3Y+iQI3BBMBCAAhBQJaBj3lAhsDBQsJCAcCBhUICQoLAgQWAgMBAh4B
-AheAAAoJEJz6SkU7fGmyAr0P/159TwSgTJx0op7B+qhQ1UGac1yktFRyBaQmzkrl
-sVH7HxB2Rsq0ty6uV0PA6JlRRb4efEKMsNLTIx6azNYgUgWY+gf/XXdde2romaHB
-ZK1uFIp9A108HlqCp+Phwtbdp3NmYiTcpH9ZqpWUmz9dWw37zJkqbqi5xLmbqToM
-+hQ49WsjGTE4ngLwwUm8Bxz+nFcyJHjZ1u4KLxTqMNoNjpWbDhM5mDuCYvf7Arwa
-IE7Kk3jaYbrsCOw7Qz0oGAp41kKY3WedFfFhqfUNUR+p0o9N79tfkeQjxWK+Ds0W
-H24d8HUT8n8lHlXo/0DfR9h9VHpnDo9VfXJyVIqr0uzU9FGHzJ/S8WddzgvxnOBl
-Ia4fZTOk+KeQsKJ+Q/X6577F6/S0bQ48D6S09/6HAac6tyrnB3RRPETvGNPOHnA0
-+P8WuhKjojqBO0N8VsTweCCjWDYlF7MEGsHvWiyYYq9gdGY3crWPv8NjMyj+wYhK
-NgF7IHIIgZszRyw2hRlwOdC8w+06AhpDivhE4n6wQlO7ScydQpZm44JJtqGO+cjo
-6akvjsFe5hV1GmeCNp46GYDe2zQ1O2/50prYcNaRfJ9PsfaNmyTGg4NVqBbPNkwR
-uXGiAS2qgeqfS3wORXexHCwYFB2Lcu0qDv7MJosM0cI1saY/eff3+Uw/AkqV51QY
-0ajT
-=Fc8M
+mQENBGATaEABCAC//LQBgVfSPb46EwV+JLwxdfGhbk6EwJFoxiYh5lACqJple+Hg
+8louI+h5KFGxTGflhmVNCx3VfHS2QsPySp+qu3uCsmanQdkHyQkaji4YO5XzJEhI
+IWt6kKfUcttvBSlfXHhtPNvO0PPeRTQypqMlHx4QfJqmuH7jDMZUf3Kg6eLWOuEb
+cW49KeAPfXdGXdq91a7XCNsurvyz8xFxgH/CywXF8pG8LVLB4dfN8b7M+RWxA023
+UZR1f1tYg9eSPRwh7V4E69PcjF7WqvWRw+Uhkes7rUdDR2ZWXhFSn2BvL6p1MdI8
+ZHCFHO6l6QwuYmSIhsCyh21YSwOb/GC0Z8bxABEBAAG0MFNwYWNrIEJ1aWxkIFBp
+cGVsaW5lIChEZW1vIEtleSkgPGtleUBzcGFjay5kZW1vPokBTgQTAQoAOBYhBImQ
+4GrBi1LDHJkUx5Mo0B3n+1WHBQJgE2hAAhsvBQsJCAcCBhUKCQgLAgQWAgMBAh4B
+AheAAAoJEJMo0B3n+1WH7cMH/1Ay1GUB5V9/K+LJGFDLXzCVTqwJbJUB2IeXNpef
+qQfhZWQzOi8qXFCYAIqRlJH3c+rYoQTpR+l7uPS87Q24MLHT/mN6ZP+mI4JLM00T
+CUhs18wN5owNBM7FOs4FTlRmvWhlTCjicCXa0UuH6pB/T35Z/OQVisAUQY82kNu0
+CUkWLfmfNfm9lVOWWMbceJ49sDGWsApYv4ihzzIvnDSS6n5Fg1p8+BEoDbzk2+f5
++Jr0lNXZmQvTx7kGUnwRfuUxJifB8SNbABWL0En2scaE/QACQXkbaNTPMdI8+l59
+ucmvDDsQHlBRXPGRM1ut+1DHSkdkKqjor3TnLkJDz+rOL+K5AQ0EYBNoQAEIAMin
+weK4wrqZhWOotTD3FS6IUh46Jd57jxd9dWBl5YkjvTwHQJQ54csneY/jWaMSoxhJ
+CEuEnb4P/6P0g5lCVYflkXLrCPLrYPJazW0EtTXQ5YRxFT7ISytsQDNgfSQO6irs
+rJlD+OWUGQYeIpa58hB+N9GnM8eka7lxKfay9lM3rn5Nz3E4x10mdgxYY9RzrFHv
+1MTGvNe/wRO67e9s0yJT+JEJ5No5h/c6J0dcrAiegiOvbhUtAYaygCpaxryTz9Bt
+CsSrBOXadzxIEnyp2pJE4vyxCVyHWve2EBk7Fagh45Z+JdA5QhGmNS3tHQmZ6Nyu
+CPAEjzn4k3jjHgoDfTUAEQEAAYkCbAQYAQoAIBYhBImQ4GrBi1LDHJkUx5Mo0B3n
++1WHBQJgE2hAAhsuAUAJEJMo0B3n+1WHwHQgBBkBCgAdFiEEwbGjdbck4hC1jzBi
+DDW/ourdPjAFAmATaEAACgkQDDW/ourdPjDaqgf+Oav1oC+TfOvPyIJgfuZK3Of6
+hvTPW81udyKgmZ/pbwJ4rSAkX2BRe0k11OXSc0V4TEkUPG62lfyNbrb54FsZAaPk
+s2C8G5LW2xlJ91JXIxsFQJcGlWTTrd7IFMe+YtcBHYSBJNmtRXodXO0sYUXCcaMk
+Au/6y6x3m9nhfpqCsc3XZ6C0QxVMMhgrdSfnEPhHV/84m8mqDobU34+eDjnY/l6V
+7ZYycH7Ihtv7z4Ed5Ahasr2FmrMOA9y6VFHeFxmUPhRi2QwFl2TZJ0z8sMosUUg0
+0X2yMfkxnMUrym12NdLYrIMGCPo8vm6UhqY7TZis7N5esBEqmMMsqSH0xuGp8d+e
+B/99W1lQjtdhtE/UEW/wRMQHFoDC2Hyd9jA+NpFK0l7ryft0Jq284b/H4reFffSj
+ctQL123KtOLNFQsG5w2Theo2XtC9tvhYTAK8736bf7CWJFw3oW5OSpvfXntzJpmw
+qcISIERXJPMENLSwUwg7YfpgmSKdrafWSaQEr/e5t2fjf0O3rJfagWH9s0+BetlY
+NhAwpSf7Tm1X+rcep/8rKAsxwhgEQpfn88+2NTzVJDrTSt7CjcbV7nVIdUcK5ki9
+2+262W2wWFNnZ3ofWutFl9yTKEY3RVbxpkzYAIM7Q1vUEpP+rYSCYlUwYudl5Dwb
+BX2VXKOmi9HIn475ykM23BDR
+=magh
-----END PGP PUBLIC KEY BLOCK-----
diff --git a/share/spack/qa/run-unit-tests b/share/spack/qa/run-unit-tests
index b71103ea31..a0c6e402c6 100755
--- a/share/spack/qa/run-unit-tests
+++ b/share/spack/qa/run-unit-tests
@@ -38,6 +38,7 @@ bin/spack help -a
# Profile and print top 20 lines for a simple call to spack spec
spack -p --lines 20 spec mpileaks%gcc ^dyninst@10.0.0 ^elfutils@0.170
+$coverage_run $(which spack) bootstrap status --dev --optional
#-----------------------------------------------------------
# Run unit tests with code coverage
diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash
index b41a21946c..753767a3de 100755
--- a/share/spack/spack-completion.bash
+++ b/share/spack/spack-completion.bash
@@ -337,7 +337,7 @@ _spack() {
then
SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --show-cores --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
else
- SPACK_COMPREPLY="activate add analyze arch audit blame bootstrap build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mark mirror module monitor patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
+ SPACK_COMPREPLY="activate add analyze arch audit blame bootstrap build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find gc gpg graph help info install license list load location log-parse maintainers mark mirror module monitor patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
fi
}
@@ -362,7 +362,7 @@ _spack_add() {
_spack_analyze() {
if $list_options
then
- SPACK_COMPREPLY="-h --help --monitor --monitor-save-local --monitor-no-auth --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix"
+ SPACK_COMPREPLY="-h --help --monitor --monitor-save-local --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix"
else
SPACK_COMPREPLY="list-analyzers run"
fi
@@ -434,10 +434,14 @@ _spack_bootstrap() {
then
SPACK_COMPREPLY="-h --help"
else
- SPACK_COMPREPLY="enable disable reset root list trust untrust"
+ SPACK_COMPREPLY="status enable disable reset root list trust untrust"
fi
}
+_spack_bootstrap_status() {
+ SPACK_COMPREPLY="-h --help --optional --dev"
+}
+
_spack_bootstrap_enable() {
SPACK_COMPREPLY="-h --help --scope"
}
@@ -511,7 +515,7 @@ _spack_buildcache_create() {
_spack_buildcache_install() {
if $list_options
then
- SPACK_COMPREPLY="-h --help -f --force -m --multiple -a --allow-root -u --unsigned -o --otherarch --sha256 --only-root"
+ SPACK_COMPREPLY="-h --help -f --force -m --multiple -a --allow-root -u --unsigned -o --otherarch"
else
_all_packages
fi
@@ -798,7 +802,7 @@ _spack_config_revert() {
}
_spack_containerize() {
- SPACK_COMPREPLY="-h --help --monitor --monitor-save-local --monitor-no-auth --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix --list-os --last-stage"
+ SPACK_COMPREPLY="-h --help --monitor --monitor-save-local --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix --list-os --last-stage"
}
_spack_create() {
@@ -866,7 +870,7 @@ _spack_deprecate() {
_spack_dev_build() {
if $list_options
then
- SPACK_COMPREPLY="-h --help -j --jobs -d --source-path -i --ignore-dependencies -n --no-checksum --deprecated --keep-prefix --skip-patch -q --quiet --drop-in --test -b --before -u --until --clean --dirty"
+ SPACK_COMPREPLY="-h --help -j --jobs --reuse -d --source-path -i --ignore-dependencies -n --no-checksum --deprecated --keep-prefix --skip-patch -q --quiet --drop-in --test -b --before -u --until --clean --dirty"
else
_all_packages
fi
@@ -1048,15 +1052,6 @@ _spack_find() {
fi
}
-_spack_flake8() {
- if $list_options
- then
- SPACK_COMPREPLY="-h --help -b --base -a --all -r --root-relative -U --no-untracked -f --fix --no-isort --no-flake8 --no-mypy --black --root"
- else
- SPACK_COMPREPLY=""
- fi
-}
-
_spack_gc() {
SPACK_COMPREPLY="-h --help -y --yes-to-all"
}
@@ -1171,7 +1166,7 @@ _spack_info() {
_spack_install() {
if $list_options
then
- SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --reuse --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --monitor --monitor-save-local --monitor-no-auth --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix --include-build-deps --no-check-signature --require-full-hash-match --show-log-on-error --source -n --no-checksum --deprecated -v --verbose --fake --only-concrete --no-add -f --file --clean --dirty --test --run-tests --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all"
+ SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --reuse --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --monitor --monitor-save-local --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix --include-build-deps --no-check-signature --require-full-hash-match --show-log-on-error --source -n --no-checksum --deprecated -v --verbose --fake --only-concrete --no-add -f --file --clean --dirty --test --run-tests --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all"
else
_all_packages
fi
@@ -1277,7 +1272,7 @@ _spack_mirror_destroy() {
_spack_mirror_add() {
if $list_options
then
- SPACK_COMPREPLY="-h --help --scope"
+ SPACK_COMPREPLY="-h --help --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url"
else
_mirrors
fi
@@ -1304,7 +1299,7 @@ _spack_mirror_rm() {
_spack_mirror_set_url() {
if $list_options
then
- SPACK_COMPREPLY="-h --help --push --scope"
+ SPACK_COMPREPLY="-h --help --push --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url"
else
_mirrors
fi
@@ -1432,7 +1427,7 @@ _spack_module_tcl_setdefault() {
}
_spack_monitor() {
- SPACK_COMPREPLY="-h --help --monitor --monitor-save-local --monitor-no-auth --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix"
+ SPACK_COMPREPLY="-h --help --monitor --monitor-save-local --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix"
}
_spack_patch() {
diff --git a/var/spack/repos/builtin.mock/packages/autotools-conditional-variants-test/package.py b/var/spack/repos/builtin.mock/packages/autotools-conditional-variants-test/package.py
new file mode 100644
index 0000000000..7a0174fd21
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/autotools-conditional-variants-test/package.py
@@ -0,0 +1,11 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+class AutotoolsConditionalVariantsTest(AutotoolsPackage):
+ homepage = "https://www.example.com"
+ has_code = False
+ version('1.0')
+ variant('example', default=True, description='nope', when='@2.0:')
diff --git a/var/spack/repos/builtin.mock/packages/cmake-conditional-variants-test/package.py b/var/spack/repos/builtin.mock/packages/cmake-conditional-variants-test/package.py
new file mode 100644
index 0000000000..53ecd1e287
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/cmake-conditional-variants-test/package.py
@@ -0,0 +1,9 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+class CmakeConditionalVariantsTest(CMakePackage):
+ homepage = "https://dev.null"
+ version('1.0')
+ variant('example', default=True, description='nope', when='@2.0:')
diff --git a/var/spack/repos/builtin.mock/packages/extends-spec/package.py b/var/spack/repos/builtin.mock/packages/extends-spec/package.py
new file mode 100644
index 0000000000..56e887b772
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/extends-spec/package.py
@@ -0,0 +1,17 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class ExtendsSpec(Package):
+ """Package that tests if the extends directive supports a spec."""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/example-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ extends('extendee@1:')
diff --git a/var/spack/repos/builtin.mock/packages/preferred-test/package.py b/var/spack/repos/builtin.mock/packages/preferred-test/package.py
index 41ee58aeb6..2ef586192a 100644
--- a/var/spack/repos/builtin.mock/packages/preferred-test/package.py
+++ b/var/spack/repos/builtin.mock/packages/preferred-test/package.py
@@ -7,11 +7,9 @@ from spack import *
class PreferredTest(Package):
- """Dummy package with develop version and preffered version"""
- homepage = "http://www.openblas.net"
- url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz"
+ """Dummy package with develop version and preferred version"""
+ homepage = "https://github.com/LLNL/mpileaks"
+ url = "https://github.com/LLNL/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
- version('develop', git='https://github.com/dummy/repo.git')
- version('0.2.16', 'b1190f3d3471685f17cfd1ec1d252ac9')
- version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9', preferred=True)
- version('0.2.14', 'b1190f3d3471685f17cfd1ec1d252ac9')
+ version('develop', git='https://github.com/LLNL/mpileaks.git')
+ version('1.0', sha256='2e34cc4505556d1c1f085758e26f2f8eea0972db9382f051b2dcfb1d7d9e1825', preferred=True)
diff --git a/var/spack/repos/builtin/packages/3dtk/package.py b/var/spack/repos/builtin/packages/3dtk/package.py
index 3f64b12393..d578c3c6ca 100644
--- a/var/spack/repos/builtin/packages/3dtk/package.py
+++ b/var/spack/repos/builtin/packages/3dtk/package.py
@@ -37,7 +37,7 @@ class _3dtk(CMakePackage):
depends_on('cmake@3.5:', when='@trunk', type='build')
depends_on('cmake@2.6.1:2', when='@1.2', type='build')
depends_on('ninja', type='build')
- depends_on('boost+serialization+graph+regex+filesystem+system+thread+date_time+program_options')
+ depends_on('boost@:1.75+serialization+graph+regex+filesystem+system+thread+date_time+program_options')
depends_on('suite-sparse')
depends_on('zlib')
depends_on('libpng')
diff --git a/var/spack/repos/builtin/packages/abinit/package.py b/var/spack/repos/builtin/packages/abinit/package.py
index db19c5af04..438d2e3e4a 100644
--- a/var/spack/repos/builtin/packages/abinit/package.py
+++ b/var/spack/repos/builtin/packages/abinit/package.py
@@ -55,6 +55,9 @@ class Abinit(AutotoolsPackage):
values=('safe', 'standard', 'aggressive'),
description='Select the optimization flavor to use.')
+ variant('install-tests', default=False,
+ description='Install test cases')
+
# Add dependencies
depends_on('atompaw')
depends_on('blas')
@@ -276,3 +279,8 @@ class Abinit(AutotoolsPackage):
# requires Python with numpy, pyyaml, pandas
if '~mpi' in self.spec:
make('tests_in')
+
+ def install(self, spec, prefix):
+ make('install')
+ if '+install-tests' in spec:
+ install_tree('tests', spec.prefix.tests)
diff --git a/var/spack/repos/builtin/packages/acts/package.py b/var/spack/repos/builtin/packages/acts/package.py
index dedf13d4ef..8640c461b4 100644
--- a/var/spack/repos/builtin/packages/acts/package.py
+++ b/var/spack/repos/builtin/packages/acts/package.py
@@ -39,6 +39,8 @@ class Acts(CMakePackage, CudaPackage):
# Supported Acts versions
version('main', branch='main')
version('master', branch='main', deprecated=True) # For compatibility
+ version('15.1.0', commit='a96e6db7de6075e85b6d5346bc89845eeb89b324', submodules=True)
+ version('15.0.0', commit='0fef9e0831a90e946745390882aac871b211eaac', submodules=True)
version('14.1.0', commit='e883ab6acfe5033509ad1c27e8e2ba980dfa59f6', submodules=True)
version('14.0.0', commit='f902bef81b60133994315c13f7d32d60048c79d8', submodules=True)
version('13.0.0', commit='ad05672e48b693fd37156f1ad62ed57aa82f858c', submodules=True)
@@ -110,43 +112,44 @@ class Acts(CMakePackage, CudaPackage):
version('0.08.0', commit='99eedb38f305e3a1cd99d9b4473241b7cd641fa9')
# Variants that affect the core Acts library
- variant('benchmarks', default=False, description='Build the performance benchmarks')
- variant('examples', default=False, description='Build the examples')
+ variant('benchmarks', default=False, description='Build the performance benchmarks', when='@0.16:')
+ variant('examples', default=False, description='Build the examples', when='@0.23: +digitization +fatras +identification +json +tgeo')
variant('integration_tests', default=False, description='Build the integration tests')
variant('unit_tests', default=False, description='Build the unit tests')
variant('log_failure_threshold', default='MAX', description='Log level above which examples should auto-crash')
# Variants that enable / disable Acts plugins
- variant('autodiff', default=False, description='Build the auto-differentiation plugin')
- variant('dd4hep', default=False, description='Build the DD4hep plugin')
+ variant('autodiff', default=False, description='Build the auto-differentiation plugin', when='@1.2:')
+ variant('dd4hep', default=False, description='Build the DD4hep plugin', when='+tgeo')
variant('digitization', default=False, description='Build the geometric digitization plugin')
- variant('fatras', default=False, description='Build the FAst TRAcking Simulation package')
+ variant('fatras', default=False, description='Build the FAst TRAcking Simulation package', when='@0.16:')
variant('fatras_geant4', default=False, description='Build Geant4 Fatras package')
variant('identification', default=False, description='Build the Identification plugin')
variant('json', default=False, description='Build the Json plugin')
variant('legacy', default=False, description='Build the Legacy package')
# FIXME: Cannot build ONNX plugin as Spack doesn't have an ONNX runtime
# FIXME: Cannot build SyCL plugin yet as Spack doesn't have SyCL support
- variant('tgeo', default=False, description='Build the TGeo plugin')
- variant('alignment', default=False, description='Build the alignment package')
+ variant('tgeo', default=False, description='Build the TGeo plugin', when='+identification')
+ variant('alignment', default=False, description='Build the alignment package', when='@13:')
# Variants that only affect Acts examples for now
- variant('geant4', default=False, description='Build the Geant4-based examples')
- variant('hepmc3', default=False, description='Build the HepMC3-based examples')
- variant('pythia8', default=False, description='Build the Pythia8-based examples')
- variant('python', default=False, description='Build python bindings for the examples')
+ variant('geant4', default=False, description='Build the Geant4-based examples', when='@0.23: +examples')
+ variant('hepmc3', default=False, description='Build the HepMC3-based examples', when='@0.23: +examples')
+ variant('pythia8', default=False, description='Build the Pythia8-based examples', when='@0.23: +examples')
+ variant('python', default=False, description='Build python bindings for the examples', when='@14: +examples')
variant('analysis', default=False, description='Build analysis applications in the examples')
# Build dependencies
- # FIXME: Use spack's autodiff package once there is one
# FIXME: Use spack's vecmem package once there is one
# (https://github.com/acts-project/acts/pull/998)
+ depends_on('autodiff @0.5.11:', when='@1.2: +autodiff')
depends_on('boost @1.62:1.69 +program_options +test', when='@:0.10.3')
depends_on('boost @1.71: +filesystem +program_options +test', when='@0.10.4:')
depends_on('cmake @3.14:', type='build')
- depends_on('dd4hep @1.11:', when='+dd4hep')
- depends_on('dd4hep @1.11: +geant4', when='+dd4hep +geant4')
- depends_on('eigen @3.3.7:')
+ depends_on('dd4hep @1.11: +dddetectors +ddrec', when='+dd4hep')
+ depends_on('dd4hep +ddg4', when='+dd4hep +geant4 +examples')
+ depends_on('eigen @3.3.7:', when='@15.1:')
+ depends_on('eigen @3.3.7:3.3.99', when='@:15.0')
depends_on('geant4', when='+fatras_geant4')
depends_on('geant4', when='+geant4')
depends_on('hepmc3 @3.2.1:', when='+hepmc3')
@@ -159,27 +162,7 @@ class Acts(CMakePackage, CudaPackage):
depends_on('root @6.10: cxxstd=14', when='+tgeo @:0.8.0')
depends_on('root @6.20: cxxstd=17', when='+tgeo @0.8.1:')
- # Some variant combinations do not make sense
- conflicts('+autodiff', when='@:1.01')
- conflicts('+benchmarks', when='@:0.15')
- conflicts('+dd4hep', when='-tgeo')
- conflicts('+examples', when='@:0.22')
- conflicts('+examples', when='-digitization')
- conflicts('+examples', when='-fatras')
- conflicts('+examples', when='-identification')
- conflicts('+examples', when='-json')
- conflicts('+examples', when='-tgeo')
- conflicts('+fatras', when='@:0.15')
- conflicts('+geant4', when='@:0.22')
- conflicts('+geant4', when='-examples')
- conflicts('+hepmc3', when='@:0.22')
- conflicts('+hepmc3', when='-examples')
- conflicts('+pythia8', when='@:0.22')
- conflicts('+pythia8', when='-examples')
- conflicts('+python', when='@:13')
- conflicts('+python', when='-examples')
- conflicts('+tgeo', when='-identification')
- conflicts('+alignment', when='@:12')
+ # ACTS has been using C++17 for a while, which precludes use of old GCC
conflicts('%gcc@:7', when='@0.23:')
def cmake_args(self):
@@ -238,16 +221,18 @@ class Acts(CMakePackage, CudaPackage):
log_failure_threshold = spec.variants['log_failure_threshold'].value
args.append("-DACTS_LOG_FAILURE_THRESHOLD={0}".format(log_failure_threshold))
- cuda_arch = spec.variants['cuda_arch'].value
- if cuda_arch != 'none':
- args.append('-DCUDA_FLAGS=-arch=sm_{0}'.format(cuda_arch[0]))
+ if '+cuda' in spec:
+ cuda_arch = spec.variants['cuda_arch'].value
+ if cuda_arch != 'none':
+ args.append('-DCUDA_FLAGS=-arch=sm_{0}'.format(cuda_arch[0]))
if 'root' in spec:
cxxstd = spec['root'].variants['cxxstd'].value
args.append("-DCMAKE_CXX_STANDARD={0}".format(cxxstd))
- # FIXME: Once we can use spack's autodiff package, set
- # ACTS_USE_SYSTEM_AUTODIFF too.
+ if spec.satisfies('+autodiff'):
+ args.append("-DACTS_USE_SYSTEM_AUTODIFF=ON")
+
if spec.satisfies('@0.33: +json'):
args.append("-DACTS_USE_SYSTEM_NLOHMANN_JSON=ON")
elif spec.satisfies('@0.14.0: +json'):
diff --git a/var/spack/repos/builtin/packages/adol-c/package.py b/var/spack/repos/builtin/packages/adol-c/package.py
index 404a511236..3c8442feda 100644
--- a/var/spack/repos/builtin/packages/adol-c/package.py
+++ b/var/spack/repos/builtin/packages/adol-c/package.py
@@ -30,6 +30,8 @@ class AdolC(AutotoolsPackage):
description='Enable advanced branching to reduce retaping')
variant('atrig_erf', default=True,
description='Enable arc-trig and error functions')
+ variant('stdczero', default=True,
+ description='Enable default initialization for the adouble datatype')
variant('doc', default=True, description='Install documentation')
variant('openmp', default=False, description='Enable OpenMP support')
variant('sparse', default=False, description='Enable sparse drivers')
@@ -70,25 +72,17 @@ class AdolC(AutotoolsPackage):
'--with-boost=no'
)
- if '+advanced_branching' in spec:
- configure_args.append(
- '--enable-advanced-branching'
- )
-
- if '+atrig_erf' in spec:
- configure_args.append(
- '--enable-atrig-erf'
- )
-
if '+openmp' in spec:
configure_args.append(
'--with-openmp-flag={0}'.format(self.compiler.openmp_flag)
)
- if '+sparse' in spec:
- configure_args.append(
- '--enable-sparse'
- )
+ configure_args.extend(self.enable_or_disable('advanced-branching',
+ variant='advanced_branching'))
+
+ configure_args.extend(self.enable_or_disable('atrig-erf', variant='atrig_erf'))
+ configure_args.extend(self.enable_or_disable('sparse'))
+ configure_args.extend(self.enable_or_disable('stdczero'))
# We can simply use the bundled examples to check
# whether Adol-C works as expected
diff --git a/var/spack/repos/builtin/packages/amdblis/package.py b/var/spack/repos/builtin/packages/amdblis/package.py
index 22fa9165eb..7e3ebba99f 100644
--- a/var/spack/repos/builtin/packages/amdblis/package.py
+++ b/var/spack/repos/builtin/packages/amdblis/package.py
@@ -22,17 +22,50 @@ class Amdblis(BlisBase):
maintainers = ['amd-toolchain-support']
+ version('3.1', sha256='2891948925b9db99eec02a1917d9887a7bee9ad2afc5421c9ba58602a620f2bf')
+ version('3.0.1', sha256='dff643e6ef946846e91e8f81b75ff8fe21f1f2d227599aecd654d184d9beff3e')
version('3.0', sha256='ac848c040cd6c3550fe49148dbdf109216cad72d3235763ee7ee8134e1528517')
version('2.2', sha256='e1feb60ac919cf6d233c43c424f6a8a11eab2c62c2c6e3f2652c15ee9063c0c9')
+ variant(
+ 'ilp64',
+ default=False,
+ description='Build with ILP64 support')
+
+ conflicts(
+ '+ilp64',
+ when='@:3.0.0',
+ msg='ilp64 is supported from amdblis 3.0.1 version onwards')
+
def configure_args(self):
spec = self.spec
args = super(Amdblis, self).configure_args()
- if spec.satisfies('@3.0 %aocc'):
- """ To enabled Fortran to C calling convention for
- complex types when compiling with aocc flang"""
+ if spec.satisfies('+ilp64'):
+ args.append('--blas-int-size=64')
+
+ """ To enable Fortran to C calling convention for
+ complex types when compiling with aocc flang"""
+ if self.spec.satisfies("@3.0 %aocc"):
args.append('CFLAGS={0}'.format("-DAOCL_F2C"))
args.append('CXXFLAGS={0}'.format("-DAOCL_F2C"))
+ elif self.spec.satisfies("@3.0.1: %aocc"):
+ args.append('--complex-return=intel')
+
+ if self.spec.satisfies("@3.1:"):
+ args.append('--disable-aocl-dynamic')
return args
+
+ def configure(self, spec, prefix):
+ config_args = self.configure_args()
+
+ # "amdzen" - A fat binary or multiarchitecture binary
+ # support for 3.1 release onwards
+ if self.spec.satisfies("@3.1:"):
+ config_args.append("amdzen")
+ else:
+ config_args.append("auto")
+
+ configure("--prefix=" + prefix,
+ *config_args)
diff --git a/var/spack/repos/builtin/packages/amdfftw/package.py b/var/spack/repos/builtin/packages/amdfftw/package.py
index a43baa1094..8e77b17199 100644
--- a/var/spack/repos/builtin/packages/amdfftw/package.py
+++ b/var/spack/repos/builtin/packages/amdfftw/package.py
@@ -31,36 +31,150 @@ class Amdfftw(FftwBase):
maintainers = ['amd-toolchain-support']
+ version('3.1', sha256='3e777f3acef13fa1910db097e818b1d0d03a6a36ef41186247c6ab1ab0afc132')
+ version('3.0.1', sha256='87030c6bbb9c710f0a64f4f306ba6aa91dc4b182bb804c9022b35aef274d1a4c')
version('3.0', sha256='a69deaf45478a59a69f77c4f7e9872967f1cfe996592dd12beb6318f18ea0bcd')
version('2.2', sha256='de9d777236fb290c335860b458131678f75aa0799c641490c644c843f0e246f8')
- variant('shared', default=True, description="Builds a shared version of the library")
- variant('openmp', default=True, description="Enable OpenMP support")
- variant('threads', default=False, description="Enable SMP threads support")
- variant('debug', default=False, description="Builds a debug version of the library")
+ variant('shared', default=True,
+ description='Builds a shared version of the library')
+ variant('openmp', default=True,
+ description='Enable OpenMP support')
+ variant('threads', default=False,
+ description='Enable SMP threads support')
+ variant('debug', default=False,
+ description='Builds a debug version of the library')
variant(
'amd-fast-planner',
default=False,
- description="Option to reduce the planning time without much"
- "tradeoff in the performance. It is supported for"
- "Float and double precisions only.")
+ description='Option to reduce the planning time without much'
+ 'tradeoff in the performance. It is supported for'
+ 'Float and double precisions only.')
+ variant(
+ 'amd-top-n-planner',
+ default=False,
+ description='Build with amd-top-n-planner support')
+ variant(
+ 'amd-mpi-vader-limit',
+ default=False,
+ description='Build with amd-mpi-vader-limit support')
+ variant(
+ 'static',
+ default=False,
+        description='Build with static support')
+ variant(
+ 'amd-trans',
+ default=False,
+        description='Build with amd-trans support')
+ variant(
+ 'amd-app-opt',
+ default=False,
+        description='Build with amd-app-opt support')
depends_on('texinfo')
provides('fftw-api@3', when='@2:')
- conflicts('precision=quad', when='@2.2 %aocc', msg="AOCC clang doesn't support quad precision")
- conflicts('+debug', when='@2.2 %aocc', msg="AOCC clang doesn't support debug")
- conflicts('%gcc@:7.2', when="@2.2:", msg="Required GCC version above 7.2 for AMDFFTW")
- conflicts('+amd-fast-planner', when="@2.2", msg="amd-fast-planner is supported from 3.0 onwards")
+ conflicts(
+ 'precision=quad',
+ when='@2.2 %aocc',
+ msg='Quad precision is not supported by AOCC clang version 2.2')
+ conflicts(
+ '+debug',
+ when='@2.2 %aocc',
+ msg='debug mode is not supported by AOCC clang version 2.2')
+ conflicts(
+ '%gcc@:7.2',
+ when='@2.2:',
+ msg='GCC version above 7.2 is required for AMDFFTW')
+ conflicts(
+        '+amd-fast-planner',
+ when='+mpi',
+ msg='mpi thread is not supported with amd-fast-planner')
+ conflicts(
+ '+amd-fast-planner',
+ when='@2.2',
+ msg='amd-fast-planner is supported from 3.0 onwards')
conflicts(
'+amd-fast-planner',
when='precision=quad',
- msg="amd-fast-planner doesn't support quad precision")
+ msg='Quad precision is not supported with amd-fast-planner')
conflicts(
'+amd-fast-planner',
when='precision=long_double',
- msg="amd-fast-planner doesn't support long_double precision")
+ msg='long_double precision is not supported with amd-fast-planner')
+ conflicts(
+ '+amd-top-n-planner',
+ when='@:3.0.0',
+ msg='amd-top-n-planner is supported from 3.0.1 onwards')
+ conflicts(
+ '+amd-top-n-planner',
+ when='precision=long_double',
+ msg='long_double precision is not supported with amd-top-n-planner')
+ conflicts(
+ '+amd-top-n-planner',
+ when='precision=quad',
+ msg='Quad precision is not supported with amd-top-n-planner')
+ conflicts(
+ '+amd-top-n-planner',
+ when='+amd-fast-planner',
+ msg='amd-top-n-planner cannot be used with amd-fast-planner')
+ conflicts(
+ '+amd-top-n-planner',
+ when='+threads',
+ msg='amd-top-n-planner works only for single thread')
+ conflicts(
+ '+amd-top-n-planner',
+ when='+mpi',
+ msg='mpi thread is not supported with amd-top-n-planner')
+ conflicts(
+ '+amd-top-n-planner',
+ when='+openmp',
+ msg='openmp thread is not supported with amd-top-n-planner')
+ conflicts(
+ '+amd-mpi-vader-limit',
+ when='@:3.0.0',
+ msg='amd-mpi-vader-limit is supported from 3.0.1 onwards')
+ conflicts(
+ '+amd-mpi-vader-limit',
+ when='precision=quad',
+ msg='Quad precision is not supported with amd-mpi-vader-limit')
+ conflicts(
+ '+amd-trans',
+ when='+threads',
+ msg='amd-trans works only for single thread')
+ conflicts(
+ '+amd-trans',
+ when='+mpi',
+ msg='mpi thread is not supported with amd-trans')
+ conflicts(
+ '+amd-trans',
+ when='+openmp',
+ msg='openmp thread is not supported with amd-trans')
+ conflicts(
+ '+amd-trans',
+ when='precision=long_double',
+ msg='long_double precision is not supported with amd-trans')
+ conflicts(
+ '+amd-trans',
+ when='precision=quad',
+ msg='Quad precision is not supported with amd-trans')
+ conflicts(
+ '+amd-app-opt',
+ when='@:3.0.1',
+ msg='amd-app-opt is supported from 3.1 onwards')
+ conflicts(
+ '+amd-app-opt',
+ when='+mpi',
+ msg='mpi thread is not supported with amd-app-opt')
+ conflicts(
+ '+amd-app-opt',
+ when='precision=long_double',
+ msg='long_double precision is not supported with amd-app-opt')
+ conflicts(
+ '+amd-app-opt',
+ when='precision=quad',
+ msg='Quad precision is not supported with amd-app-opt')
def configure(self, spec, prefix):
"""Configure function"""
@@ -72,28 +186,13 @@ class Amdfftw(FftwBase):
# Check if compiler is AOCC
if '%aocc' in spec:
- options.append("CC={0}".format(os.path.basename(spack_cc)))
- options.append("FC={0}".format(os.path.basename(spack_fc)))
- options.append("F77={0}".format(os.path.basename(spack_fc)))
-
- if '+shared' in spec:
- options.append('--enable-shared')
- else:
- options.append('--disable-shared')
+ options.append('CC={0}'.format(os.path.basename(spack_cc)))
+ options.append('FC={0}'.format(os.path.basename(spack_fc)))
+ options.append('F77={0}'.format(os.path.basename(spack_fc)))
if '+debug' in spec:
options.append('--enable-debug')
- if '+openmp' in spec:
- options.append('--enable-openmp')
- else:
- options.append('--disable-openmp')
-
- if '+threads' in spec:
- options.append('--enable-threads')
- else:
- options.append('--disable-threads')
-
if '+mpi' in spec:
options.append('--enable-mpi')
options.append('--enable-amd-mpifft')
@@ -101,13 +200,18 @@ class Amdfftw(FftwBase):
options.append('--disable-mpi')
options.append('--disable-amd-mpifft')
- if '+amd-fast-planner' in spec:
- options.append('--enable-amd-fast-planner')
- else:
- options.append('--disable-amd-fast-planner')
+ options.extend(self.enable_or_disable('shared'))
+ options.extend(self.enable_or_disable('openmp'))
+ options.extend(self.enable_or_disable('threads'))
+ options.extend(self.enable_or_disable('amd-fast-planner'))
+ options.extend(self.enable_or_disable('amd-top-n-planner'))
+ options.extend(self.enable_or_disable('amd-mpi-vader-limit'))
+ options.extend(self.enable_or_disable('static'))
+ options.extend(self.enable_or_disable('amd-trans'))
+ options.extend(self.enable_or_disable('amd-app-opt'))
if not self.compiler.f77 or not self.compiler.fc:
- options.append("--disable-fortran")
+ options.append('--disable-fortran')
# Cross compilation is supported in amd-fftw by making use of target
# variable to set AMD_ARCH configure option.
@@ -115,9 +219,9 @@ class Amdfftw(FftwBase):
# use target variable to set appropriate -march option in AMD_ARCH.
arch = spec.architecture
options.append(
- "AMD_ARCH={0}".format(
+ 'AMD_ARCH={0}'.format(
arch.target.optimization_flags(
- spec.compiler).split("=")[-1]))
+ spec.compiler).split('=')[-1]))
# Specific SIMD support.
# float and double precisions are supported
diff --git a/var/spack/repos/builtin/packages/amdlibflame/package.py b/var/spack/repos/builtin/packages/amdlibflame/package.py
index 56e97e5894..fc27de6d52 100644
--- a/var/spack/repos/builtin/packages/amdlibflame/package.py
+++ b/var/spack/repos/builtin/packages/amdlibflame/package.py
@@ -36,11 +36,22 @@ class Amdlibflame(LibflameBase):
maintainers = ['amd-toolchain-support']
+ version('3.1', sha256='97c74086306fa6dea9233a3730407c400c196b55f4461d4861364b1ac131ca42')
+ version('3.0.1', sha256='5859e7b39ffbe73115dd598b035f212d36310462cf3a45e555a5087301710776')
version('3.0', sha256='d94e08b688539748571e6d4c1ec1ce42732eac18bd75de989234983c33f01ced')
version('2.2', sha256='12b9c1f92d2c2fa637305aaa15cf706652406f210eaa5cbc17aaea9fcfa576dc')
+ variant('ilp64', default=False, description='Build with ILP64 support')
+
+ conflicts('+ilp64', when="@:3.0.0",
+ msg="ILP64 is supported from 3.0.1 onwards")
+ conflicts('threads=pthreads',
+ msg='pthread is not supported')
+ conflicts('threads=openmp',
+ msg='openmp is not supported')
+
patch('aocc-2.2.0.patch', when="@:2", level=1)
- patch('cray-compiler-wrapper.patch')
+ patch('cray-compiler-wrapper.patch', when="@:3.0.0", level=1)
provides('flame@5.2', when='@2:')
@@ -64,8 +75,29 @@ class Amdlibflame(LibflameBase):
if "@3.0: %aocc" in self.spec:
args.append("--enable-f2c-dotc")
+ if "@3.0.1: +ilp64" in self.spec:
+ args.append("--enable-ilp64")
+
+ if "@3.1: %aocc" in self.spec:
+ args.append("--enable-void-return-complex")
+
+ if "@3.1: " in self.spec:
+ args.append("--enable-blas-ext-gemmt")
+
return args
+ @run_after('build')
+ @on_package_attributes(run_tests=True)
+ def check(self):
+ """make check for single and multithread"""
+ blas_flags = self.spec['blas'].libs.ld_flags
+ if self.spec.variants['threads'].value != 'none':
+ make('check',
+ 'LIBBLAS = -fopenmp {0}'.format(blas_flags), parallel=False)
+ else:
+ make('check',
+ 'LIBBLAS = {0}'.format(blas_flags), parallel=False)
+
def install(self, spec, prefix):
"""make install function"""
# make install in parallel fails with message 'File already exists'
diff --git a/var/spack/repos/builtin/packages/amdlibm/0001-libm-ose-Scripts-cleanup-pyc-files.patch b/var/spack/repos/builtin/packages/amdlibm/0001-libm-ose-Scripts-cleanup-pyc-files.patch
index fc084b7d4b..fc084b7d4b 100755..100644
--- a/var/spack/repos/builtin/packages/amdlibm/0001-libm-ose-Scripts-cleanup-pyc-files.patch
+++ b/var/spack/repos/builtin/packages/amdlibm/0001-libm-ose-Scripts-cleanup-pyc-files.patch
diff --git a/var/spack/repos/builtin/packages/amdlibm/0002-libm-ose-prevent-log-v3.c-from-building.patch b/var/spack/repos/builtin/packages/amdlibm/0002-libm-ose-prevent-log-v3.c-from-building.patch
index b540f1cda3..b540f1cda3 100755..100644
--- a/var/spack/repos/builtin/packages/amdlibm/0002-libm-ose-prevent-log-v3.c-from-building.patch
+++ b/var/spack/repos/builtin/packages/amdlibm/0002-libm-ose-prevent-log-v3.c-from-building.patch
diff --git a/var/spack/repos/builtin/packages/amdlibm/package.py b/var/spack/repos/builtin/packages/amdlibm/package.py
index c38c98fd1e..fa88f3bf56 100644
--- a/var/spack/repos/builtin/packages/amdlibm/package.py
+++ b/var/spack/repos/builtin/packages/amdlibm/package.py
@@ -25,6 +25,7 @@ class Amdlibm(SConsPackage):
# of master branch.
# To install amdlibm from latest master branch:
# spack install amdlibm ^amdlibm@master
+ version("3.1", branch="aocl-3.1")
version("3.0", branch="aocl-3.0")
version("2.2", commit="4033e022da428125747e118ccd6fdd9cee21c470")
@@ -32,6 +33,7 @@ class Amdlibm(SConsPackage):
description="Building with verbosity")
# Mandatory dependencies
+ depends_on("python@3.6.2", when="%aocc@3.2.0:", type=("build", "run"))
depends_on("python@3.6.1:", type=("build", "run"))
depends_on("scons@3.1.2:", type=("build"))
depends_on("mpfr", type=("link"))
@@ -39,25 +41,35 @@ class Amdlibm(SConsPackage):
patch("0001-libm-ose-Scripts-cleanup-pyc-files.patch", when="@2.2")
patch("0002-libm-ose-prevent-log-v3.c-from-building.patch", when="@2.2")
- conflicts("%gcc@:9.1", msg="Minimum required GCC version is 9.2.0")
+ conflicts(
+ '%gcc@:9.1.0',
+ msg='Minimum required GCC version is 9.2.0')
+ conflicts(
+ '%gcc@11.2.0:',
+ msg='Maximum required GCC version is 11.1.0')
+ conflicts(
+ '%aocc@3.2.0',
+ when='@2.2:3.0',
+ msg='amdlibm 2.2 and 3.0 versions are not supported with AOCC 3.2.0')
def build_args(self, spec, prefix):
"""Setting build arguments for amdlibm """
args = ["--prefix={0}".format(prefix)]
- if "%aocc" in spec:
- args.append("--compiler=aocc")
-
# we are circumventing the use of
# Spacks compiler wrappers because
# SCons wipes out all environment variables.
- args.append("CC={0}".format(self.compiler.cc))
- args.append("CXX={0}".format(self.compiler.cxx))
+ if spec.satisfies("@:3.0 %aocc"):
+ args.append("--compiler=aocc")
+
+ var_prefix = '' if spec.satisfies("@:3.0") else 'ALM_'
+ args.append("{0}CC={1}".format(var_prefix, self.compiler.cc))
+ args.append("{0}CXX={1}".format(var_prefix, self.compiler.cxx))
if "+verbose" in spec:
- args.append("verbose=1")
+ args.append("--verbose=1")
else:
- args.append("verbose=0")
+ args.append("--verbose=0")
return args
diff --git a/var/spack/repos/builtin/packages/amdscalapack/package.py b/var/spack/repos/builtin/packages/amdscalapack/package.py
index 23ccc2ccb3..d427760dcb 100644
--- a/var/spack/repos/builtin/packages/amdscalapack/package.py
+++ b/var/spack/repos/builtin/packages/amdscalapack/package.py
@@ -20,11 +20,11 @@ class Amdscalapack(ScalapackBase):
_name = 'amdscalapack'
homepage = "https://developer.amd.com/amd-aocl/scalapack/"
- url = "https://github.com/amd/scalapack/archive/3.0.tar.gz"
git = "https://github.com/amd/scalapack.git"
maintainers = ['amd-toolchain-support']
+ version('3.1', sha256='4c2ee2c44644a0feec0c6fc1b1a413fa9028f14d7035d43a398f5afcfdbacb98')
version('3.0', sha256='6e6f3578f44a8e64518d276e7580530599ecfa8729f568303ed2590688e7096f')
version('2.2', sha256='2d64926864fc6d12157b86e3f88eb1a5205e7fc157bf67e7577d0f18b9a7484c')
@@ -33,6 +33,21 @@ class Amdscalapack(ScalapackBase):
default='Release',
description='CMake build type',
values=('Release', 'RelWithDebInfo'))
+ variant(
+ 'ilp64',
+ default=False,
+ description='Build with ILP64 support')
+
+ conflicts('+ilp64', when="@:3.0",
+ msg="ILP64 is supported from 3.1 onwards")
+
+ def url_for_version(self, version):
+ if version == Version('3.1'):
+ return "https://github.com/amd/aocl-scalapack/archive/3.1.tar.gz"
+ elif version == Version('3.0'):
+ return "https://github.com/amd/scalapack/archive/3.0.tar.gz"
+ elif version == Version('2.2'):
+ return "https://github.com/amd/scalapack/archive/2.2.tar.gz"
def cmake_args(self):
""" cmake_args function"""
@@ -47,8 +62,13 @@ class Amdscalapack(ScalapackBase):
args.extend(['-DUSE_DOTC_WRAPPER:BOOL=%s' % (
'ON' if spec.satisfies('%aocc ^amdblis') else 'OFF')])
+ # -DENABLE_ILP64:BOOL=ON
+ args.extend([self.define_from_variant('ENABLE_ILP64', 'ilp64')])
+
+ # -DUSE_F2C:BOOL=ON
+ args.extend([self.define('USE_F2C', spec.satisfies('@:3.0'))])
+
args.extend([
- '-DUSE_F2C=ON',
'-DLAPACK_FOUND=true',
'-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc,
'-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc,
diff --git a/var/spack/repos/builtin/packages/amrex/package.py b/var/spack/repos/builtin/packages/amrex/package.py
index 8a86bf97d3..a06369066c 100644
--- a/var/spack/repos/builtin/packages/amrex/package.py
+++ b/var/spack/repos/builtin/packages/amrex/package.py
@@ -12,7 +12,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
mesh refinement (AMR) applications."""
homepage = "https://amrex-codes.github.io/amrex/"
- url = "https://github.com/AMReX-Codes/amrex/releases/download/21.11/amrex-21.11.tar.gz"
+ url = "https://github.com/AMReX-Codes/amrex/releases/download/21.12/amrex-21.12.tar.gz"
git = "https://github.com/AMReX-Codes/amrex.git"
tags = ['ecp', 'e4s']
@@ -20,6 +20,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
maintainers = ['WeiqunZhang', 'asalmgren']
version('develop', branch='development')
+ version('21.12', sha256='439f9ebf2b440fc739a7976f3ade188ec3e1de5f51a0b151e6b8dda36fa67278')
version('21.11', sha256='2edb72d7cf7e86340fcaceb325368560957bcd952fd34cd501bfdf038e1338a4')
version('21.10', sha256='a11954c03b1ec26c26b676460dc5de5195469e813b70fbcea6dfdefeafaf5407')
version('21.09', sha256='983b41d93bf9417c032080fd2ec7c04d0d2b820e613a076bd07566aa5a8aa4bd')
diff --git a/var/spack/repos/builtin/packages/ants/package.py b/var/spack/repos/builtin/packages/ants/package.py
index 6c3c2232af..caf63d8f4b 100644
--- a/var/spack/repos/builtin/packages/ants/package.py
+++ b/var/spack/repos/builtin/packages/ants/package.py
@@ -27,6 +27,7 @@ class Ants(CMakePackage):
def cmake_args(self):
return [
+ "-DBUILD_TESTING=OFF", # needed for <= 2.3.5 due to ANTs/#1236
self.define_from_variant('ITK_BUILD_MINC_SUPPORT', 'minc')
]
diff --git a/var/spack/repos/builtin/packages/aocc/package.py b/var/spack/repos/builtin/packages/aocc/package.py
index f54b173c7c..9ecd0fef91 100755..100644
--- a/var/spack/repos/builtin/packages/aocc/package.py
+++ b/var/spack/repos/builtin/packages/aocc/package.py
@@ -32,6 +32,8 @@ class Aocc(Package):
maintainers = ['amd-toolchain-support']
+ version(ver="3.2.0", sha256='8493525b3df77f48ee16f3395a68ad4c42e18233a44b4d9282b25dbb95b113ec',
+ url='https://developer.amd.com/wordpress/media/files/aocc-compiler-3.2.0.tar')
version(ver="3.1.0", sha256='1948104a430506fe5e445c0c796d6956109e7cc9fc0a1e32c9f1285cfd566d0c',
url='https://developer.amd.com/wordpress/media/files/aocc-compiler-3.1.0.tar')
version(ver="3.0.0", sha256='4ff269b1693856b9920f57e3c85ce488c8b81123ddc88682a3ff283979362227',
diff --git a/var/spack/repos/builtin/packages/aocl-sparse/package.py b/var/spack/repos/builtin/packages/aocl-sparse/package.py
index f23ef06027..426e17faa3 100644
--- a/var/spack/repos/builtin/packages/aocl-sparse/package.py
+++ b/var/spack/repos/builtin/packages/aocl-sparse/package.py
@@ -20,6 +20,7 @@ class AoclSparse(CMakePackage):
maintainers = ['amd-toolchain-support']
+ version('3.1', sha256='8536f06095c95074d4297a3d2910654085dd91bce82e116c10368a9f87e9c7b9')
version('3.0', sha256='1d04ba16e04c065051af916b1ed9afce50296edfa9b1513211a7378e1d6b952e')
version('2.2', sha256='33c2ed6622cda61d2613ee63ff12c116a6cd209c62e54307b8fde986cd65f664')
@@ -34,6 +35,7 @@ class AoclSparse(CMakePackage):
description='Build with ILP64 support')
depends_on('boost', when='@2.2')
+ depends_on('cmake@3.5:', type='build')
@property
def build_directory(self):
diff --git a/var/spack/repos/builtin/packages/aoflagger/package.py b/var/spack/repos/builtin/packages/aoflagger/package.py
index e44961bcfd..34daa97a9d 100644
--- a/var/spack/repos/builtin/packages/aoflagger/package.py
+++ b/var/spack/repos/builtin/packages/aoflagger/package.py
@@ -15,7 +15,7 @@ class Aoflagger(CMakePackage):
version('2.10.0', sha256='3ec1188d37101acf2029575ebc09c50b19c158c88a12b55ac5d25a96bd8fc18d')
- depends_on('casacore+python+fftw@1.10:')
+ depends_on('casacore+python~fftpack@1.10:')
depends_on('fftw~mpi@3.0:')
depends_on('boost+python@:1.66.99')
depends_on('libxml2')
diff --git a/var/spack/repos/builtin/packages/apktool/package.py b/var/spack/repos/builtin/packages/apktool/package.py
new file mode 100644
index 0000000000..ba2041967f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/apktool/package.py
@@ -0,0 +1,42 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Apktool(Package):
+ """A tool for reverse engineering 3rd party, closed, binary
+ Android apps."""
+
+ homepage = "https://ibotpeaches.github.io/Apktool/"
+ url = "https://github.com/iBotPeaches/Apktool/archive/refs/tags/v2.6.0.tar.gz"
+
+ version('2.6.0', sha256='74739cdb1434ca35ec34e51ca7272ad3f378ae3ed0a2d5805d9a2fab5016037f')
+
+ depends_on('java@8:', type=('build', 'run'))
+
+ phases = ['build', 'install']
+
+ def setup_build_environment(self, env):
+ env.set('LC_ALL', 'en_US.UTF-8')
+
+ def build(self, spec, prefix):
+ gradlew = Executable('./gradlew')
+ gradlew('--info', '--debug', 'build', 'shadowJar')
+
+ def install(self, spec, prefix):
+ ln = which('ln')
+ mkdir(join_path(prefix, 'bin'))
+ install(
+ join_path('brut.apktool', 'apktool-cli', 'build', 'libs',
+ 'apktool-cli-all.jar'),
+ join_path(prefix, 'bin'))
+ install(
+ join_path('scripts', 'linux', 'apktool'),
+ join_path(prefix, 'bin'))
+ ln(
+ '-s',
+ join_path(prefix, 'bin', 'apktool-cli-all.jar'),
+ join_path(prefix, 'bin', 'apktool.jar'))
diff --git a/var/spack/repos/builtin/packages/arborx/package.py b/var/spack/repos/builtin/packages/arborx/package.py
index 5251d6191e..f95681bc00 100644
--- a/var/spack/repos/builtin/packages/arborx/package.py
+++ b/var/spack/repos/builtin/packages/arborx/package.py
@@ -20,7 +20,7 @@ class Arborx(CMakePackage):
version('master', branch='master')
version('1.1', sha256='2b5f2d2d5cec57c52f470c2bf4f42621b40271f870b4f80cb57e52df1acd90ce')
version('1.0', sha256='9b5f45c8180622c907ef0b7cc27cb18ba272ac6558725d9e460c3f3e764f1075')
- version('0.9-beta', sha256='b349b5708d1aa00e8c20c209ac75dc2d164ff9bf1b85adb5437346d194ba6c0d')
+ version('0.9-beta', sha256='b349b5708d1aa00e8c20c209ac75dc2d164ff9bf1b85adb5437346d194ba6c0d', deprecated=True)
# ArborX relies on Kokkos to provide devices, providing one-to-one matching
# variants. The only way to disable those devices is to make sure Kokkos
diff --git a/var/spack/repos/builtin/packages/asciidoc/package.py b/var/spack/repos/builtin/packages/asciidoc/package.py
index 72ab85c947..89aff06282 100644
--- a/var/spack/repos/builtin/packages/asciidoc/package.py
+++ b/var/spack/repos/builtin/packages/asciidoc/package.py
@@ -30,6 +30,9 @@ class Asciidoc(AutotoolsPackage):
depends_on('docbook-xsl', type=('build', 'run'))
depends_on('python@2.3.0:2.7', when='@:8.6.9', type=('build', 'run'))
depends_on('python@3.5:', when='@9.0.2:', type=('build', 'run'))
+ depends_on('autoconf', type='build')
+ depends_on('automake', type='build')
+ depends_on('libtool', type='build')
@when('@:8.6.9')
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/assimp/package.py b/var/spack/repos/builtin/packages/assimp/package.py
index 58968fe271..9b694cf54f 100644
--- a/var/spack/repos/builtin/packages/assimp/package.py
+++ b/var/spack/repos/builtin/packages/assimp/package.py
@@ -14,7 +14,11 @@ class Assimp(CMakePackage):
url = "https://github.com/assimp/assimp/archive/v4.0.1.tar.gz"
git = "https://github.com/assimp/assimp.git"
+ maintainers = ['wdconinc']
+
version('master', branch='master')
+ version('5.1.4', sha256='bd32cdc27e1f8b7ac09d914ab92dd81d799c97e9e47315c1f40dcb7c6f7938c6')
+ version('5.1.3', sha256='50a7bd2c8009945e1833c591d16f4f7c491a3c6190f69d9d007167aadb175c35')
version('5.0.1', sha256='11310ec1f2ad2cd46b95ba88faca8f7aaa1efe9aa12605c55e3de2b977b3dbfc')
version('4.0.1', sha256='60080d8ab4daaab309f65b3cffd99f19eb1af8d05623fff469b9b652818e286e')
diff --git a/var/spack/repos/builtin/packages/autodiff/package.py b/var/spack/repos/builtin/packages/autodiff/package.py
new file mode 100644
index 0000000000..013b011f85
--- /dev/null
+++ b/var/spack/repos/builtin/packages/autodiff/package.py
@@ -0,0 +1,35 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Autodiff(CMakePackage):
+ """autodiff is automatic differentiation made easier for C++."""
+
+ homepage = "https://autodiff.github.io"
+ url = "https://github.com/autodiff/autodiff/archive/refs/tags/v0.6.4.tar.gz"
+ list_url = "https://github.com/autodiff/autodiff/releases"
+ git = "https://github.com/autodiff/autodiff.git"
+
+ maintainers = ['wdconinc', 'HadrienG2']
+
+ version('0.6.4', sha256='cfe0bb7c0de10979caff9d9bfdad7e6267faea2b8d875027397486b47a7edd75')
+ version('0.5.13', sha256='a73dc571bcaad6b44f74865fed51af375f5a877db44321b5568d94a4358b77a1')
+
+    variant('python', default=False, description='Enable the compilation of the python bindings.')
+    variant('examples', default=False, description='Enable the compilation of the example files.')
+
+ depends_on('cmake@3.0:', type='build')
+ depends_on('eigen')
+ depends_on('py-pybind11', type=('build', 'run'))
+
+ def cmake_args(self):
+ args = [
+ self.define('AUTODIFF_BUILD_TESTS', self.run_tests),
+ self.define_from_variant('AUTODIFF_BUILD_PYTHON', 'python'),
+ self.define_from_variant('AUTODIFF_BUILD_EXAMPLES', 'examples',)
+ ]
+ return args
diff --git a/var/spack/repos/builtin/packages/aws-parallelcluster/package.py b/var/spack/repos/builtin/packages/aws-parallelcluster/package.py
index ac81dbb87e..f1f8df2269 100644
--- a/var/spack/repos/builtin/packages/aws-parallelcluster/package.py
+++ b/var/spack/repos/builtin/packages/aws-parallelcluster/package.py
@@ -13,12 +13,14 @@ class AwsParallelcluster(PythonPackage):
tool to deploy and manage HPC clusters in the AWS cloud."""
homepage = "https://github.com/aws/aws-parallelcluster"
- pypi = "aws-parallelcluster/aws-parallelcluster-2.11.2.tar.gz"
+ pypi = "aws-parallelcluster/aws-parallelcluster-2.11.4.tar.gz"
maintainers = [
- 'demartinofra', 'enrico-usai', 'lukeseawalker', 'tilne'
+ 'demartinofra', 'enrico-usai', 'lukeseawalker',
]
+ version('2.11.4', sha256='449537ccda57f91f4ec6ae0c94a8e2b1a789f08f80245fadb28f44a4351d5da4')
+ version('2.11.3', sha256='7c1d74123f2f670846aed8fe1fcca5908bb46ec014e2dfc7d3ec8994447a37a0')
version('2.11.2', sha256='60d96a5ea4dca4816ceffc4546549743abd1f6207c62f016c9c348adc64b2ec0')
version('2.11.1', sha256='dc102eeb0db30fb183c1ee076e340223095fde7d9079004fde401b7ad658242d')
version('2.11.0', sha256='ec2f54f752a355649ca065a41fa4ac19697f570a136e10e0a6548f5abad3ea8a')
diff --git a/var/spack/repos/builtin/packages/axom/package.py b/var/spack/repos/builtin/packages/axom/package.py
index 8f7e59ec2c..d644ac134a 100644
--- a/var/spack/repos/builtin/packages/axom/package.py
+++ b/var/spack/repos/builtin/packages/axom/package.py
@@ -40,6 +40,8 @@ class Axom(CachedCMakePackage, CudaPackage):
version('main', branch='main', submodules=True)
version('develop', branch='develop', submodules=True)
+ version('0.6.1', tag='v0.6.1', submodules=True)
+ version('0.6.0', tag='v0.6.0', submodules=True)
version('0.5.0', tag='v0.5.0', submodules=True)
version('0.4.0', tag='v0.4.0', submodules=True)
version('0.3.3', tag='v0.3.3', submodules=True)
@@ -48,6 +50,8 @@ class Axom(CachedCMakePackage, CudaPackage):
version('0.3.0', tag='v0.3.0', submodules=True)
version('0.2.9', tag='v0.2.9', submodules=True)
+ patch('scr_examples_gtest.patch', when='@0.6.0:0.6.1')
+
root_cmakelists_dir = 'src'
# -----------------------------------------------------------------------
@@ -100,18 +104,22 @@ class Axom(CachedCMakePackage, CudaPackage):
depends_on("lua", when="+lua")
depends_on("scr", when="+scr")
- depends_on("kvtree@master", when="+scr")
+ depends_on("kvtree@main", when="+scr")
depends_on("dtcmp", when="+scr")
- depends_on("raja~openmp", when="+raja~openmp")
- depends_on("raja+openmp", when="+raja+openmp")
- depends_on("raja+cuda", when="+raja+cuda")
-
with when('+umpire'):
- depends_on('umpire@5.0.1:5')
+ depends_on('umpire@6.0.0:', when='@0.6.0:')
+ depends_on('umpire@5:5.0.1', when='@:0.5.0')
depends_on('umpire +openmp', when='+openmp')
depends_on('umpire +cuda', when='+cuda')
+ with when('+raja'):
+ depends_on('raja@0.14.0:', when='@0.6.0:')
+ depends_on('raja@:0.13.0', when='@:0.5.0')
+ depends_on("raja~openmp", when="~openmp")
+ depends_on("raja+openmp", when="+openmp")
+ depends_on("raja+cuda", when="+cuda")
+
for sm_ in CudaPackage.cuda_arch_values:
depends_on('raja cuda_arch={0}'.format(sm_),
when='+raja cuda_arch={0}'.format(sm_))
diff --git a/var/spack/repos/builtin/packages/axom/scr_examples_gtest.patch b/var/spack/repos/builtin/packages/axom/scr_examples_gtest.patch
new file mode 100644
index 0000000000..7fb7539e1d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/axom/scr_examples_gtest.patch
@@ -0,0 +1,50 @@
+diff --git a/src/axom/sidre/examples/spio/CMakeLists.txt b/src/axom/sidre/examples/spio/CMakeLists.txt
+index b4013c7fd..8558b8e4a 100644
+--- a/src/axom/sidre/examples/spio/CMakeLists.txt
++++ b/src/axom/sidre/examples/spio/CMakeLists.txt
+@@ -21,7 +21,7 @@ if (SCR_FOUND)
+ list(APPEND example_sources IO_SCR_Output.cpp)
+ endif()
+
+-set(spio_example_depends axom gtest ${EXTRA_LIBS})
++set(spio_example_depends axom ${EXTRA_LIBS})
+ blt_list_append(TO spio_example_depends ELEMENTS hdf5 IF HDF5_FOUND)
+ blt_list_append(TO spio_example_depends ELEMENTS scr IF SCR_FOUND)
+
+@@ -39,17 +39,23 @@ foreach(src ${example_sources})
+ )
+ endforeach()
+
+- if(AXOM_ENABLE_TESTS)
+- if(SCR_FOUND)
+- if(ENABLE_MPI)
+- axom_add_test(
+- NAME spio_IO_SCR_Checkpoint
+- COMMAND spio_IO_SCR_Checkpoint_ex
+- NUM_MPI_TASKS 4 )
+- else()
+- axom_add_test(
+- NAME spio_IO_SCR_Checkpoint
+- COMMAND spio_IO_SCR_Checkpoint_ex )
+- endif()
+- endif()
++if(SCR_FOUND AND AXOM_ENABLE_TESTS)
++ blt_add_executable(
++ NAME spio_scr_ex
++ SOURCES spio_scr.cpp
++ OUTPUT_DIR ${EXAMPLE_OUTPUT_DIRECTORY}
++ DEPENDS_ON ${spio_example_depends} scr gtest
++ FOLDER axom/sidre/examples
++ )
++
++ if(ENABLE_MPI)
++ axom_add_test(
++ NAME spio_IO_SCR_Checkpoint
++ COMMAND spio_IO_SCR_Checkpoint_ex
++ NUM_MPI_TASKS 4 )
++ else()
++ axom_add_test(
++ NAME spio_IO_SCR_Checkpoint
++ COMMAND spio_IO_SCR_Checkpoint_ex )
+ endif()
++endif()
diff --git a/var/spack/repos/builtin/packages/bcftools/package.py b/var/spack/repos/builtin/packages/bcftools/package.py
index 8f4f5bdad8..86341bb347 100644
--- a/var/spack/repos/builtin/packages/bcftools/package.py
+++ b/var/spack/repos/builtin/packages/bcftools/package.py
@@ -13,6 +13,8 @@ class Bcftools(AutotoolsPackage):
homepage = "https://samtools.github.io/bcftools/"
url = "https://github.com/samtools/bcftools/releases/download/1.3.1/bcftools-1.3.1.tar.bz2"
+ version('1.14', sha256='b7ef88ae89fcb55658c5bea2e8cb8e756b055e13860036d6be13756782aa19cb')
+ version('1.13', sha256='13bfa1da2a5edda8fa51196a47a0b4afb3fef17516451e4f0e78477f3dd30b90')
version('1.12', sha256='7a0e6532b1495b9254e38c6698d955e5176c1ee08b760dfea2235ee161a024f5')
version('1.10.2', sha256='f57301869d0055ce3b8e26d8ad880c0c1989bf25eaec8ea5db99b60e31354e2c')
version('1.9', sha256='6f36d0e6f16ec4acf88649fb1565d443acf0ba40f25a9afd87f14d14d13070c8')
@@ -38,6 +40,8 @@ class Bcftools(AutotoolsPackage):
depends_on('perl', when='@1.8:~perl-filters', type='run')
depends_on('perl', when='@1.8:+perl-filters', type=('build', 'run'))
+ depends_on('htslib@1.14', when='@1.14')
+ depends_on('htslib@1.13', when='@1.13')
depends_on('htslib@1.12', when='@1.12')
depends_on('htslib@1.10.2', when='@1.10.2')
depends_on('htslib@1.9', when='@1.9')
diff --git a/var/spack/repos/builtin/packages/berkeley-db/package.py b/var/spack/repos/builtin/packages/berkeley-db/package.py
index 0e5fbdbbf1..a31e9f6dfd 100644
--- a/var/spack/repos/builtin/packages/berkeley-db/package.py
+++ b/var/spack/repos/builtin/packages/berkeley-db/package.py
@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import re
class BerkeleyDb(AutotoolsPackage):
@@ -11,6 +12,8 @@ class BerkeleyDb(AutotoolsPackage):
# URL must remain http:// so Spack can bootstrap curl
url = "https://download.oracle.com/berkeley-db/db-18.1.40.tar.gz"
+ executables = [r'^db_load$'] # One should be sufficient
+
version("18.1.40", sha256="0cecb2ef0c67b166de93732769abdeba0555086d51de1090df325e18ee8da9c8")
version('18.1.32', sha256='fa1fe7de9ba91ad472c25d026f931802597c29f28ae951960685cde487c8d654', deprecated=True)
version('6.2.32', sha256='a9c5e2b004a5777aa03510cfe5cd766a4a3b777713406b02809c17c8e0e7a8fb')
@@ -32,6 +35,18 @@ class BerkeleyDb(AutotoolsPackage):
conflicts('+stl', when='~cxx', msg='+stl implies +cxx')
+ @classmethod
+ def determine_version(cls, exe):
+ """Return the version of the provided executable or ``None`` if
+ the version cannot be determined.
+
+ Arguments:
+ exe (str): absolute path to the executable being examined
+ """
+ output = Executable(exe)('-V', output=str, error=str)
+ match = re.search(r'Berkeley DB\s+([\d\.]+)', output)
+ return match.group(1) if match else None
+
def patch(self):
# some of the docs are missing in 18.1.40
if self.spec.satisfies("@18.1.40"):
diff --git a/var/spack/repos/builtin/packages/bohrium/package.py b/var/spack/repos/builtin/packages/bohrium/package.py
index c2defe0b8a..3e5895fb2d 100644
--- a/var/spack/repos/builtin/packages/bohrium/package.py
+++ b/var/spack/repos/builtin/packages/bohrium/package.py
@@ -68,6 +68,8 @@ class Bohrium(CMakePackage, CudaPackage):
conflicts('~node~proxy')
conflicts('~openmp~opencl~cuda')
+ conflicts('+cbridge', when='~python')
+
#
# Dependencies
#
@@ -94,9 +96,14 @@ class Bohrium(CMakePackage, CudaPackage):
depends_on('py-numpy', type=("build", "test", "run"), when="+python")
depends_on('swig', type="build", when="+python")
depends_on('py-cython', type="build", when="+python")
+ depends_on('py-virtualenv', type="build", when="+python")
+ depends_on('py-pip', type="build", when="+python")
+ depends_on('py-wheel', type="build", when="+python")
depends_on('zlib', when="+proxy")
+ depends_on('libsigsegv')
+
@property
def config_file(self):
"""Return the path of the Bohrium system-wide configuration file"""
@@ -108,14 +115,11 @@ class Bohrium(CMakePackage, CudaPackage):
def cmake_args(self):
spec = self.spec
- # Sanity check
- cuda_arch = spec.variants['cuda_arch'].value
- if "+cuda" in spec and len(cuda_arch) >= 1 and cuda_arch[0]:
- # TODO Add cuda_arch support to Bohrium once the basic setup
- # via Spack works.
- raise InstallError(
- "Bohrium does not support setting the CUDA architecture yet."
- )
+ # TODO: Use cuda_arch to specify compute capabilities to build.
+ # This package detects the compute capability of the device on the
+ # build host and uses that to set a single compute capability. This is
+ # limiting for generic builds and the ability to run CUDA builds on
+ # different hosts.
args = [
# Choose a particular python version
@@ -136,7 +140,7 @@ class Bohrium(CMakePackage, CudaPackage):
#
# Bridges and interfaces
"-DBRIDGE_BHXX=ON",
- "-DBRIDGE_C=" + str("+cbridge" in spec or "+python" in spec),
+ "-DBRIDGE_C=" + str("+cbridge" in spec and "+python" in spec),
"-DBRIDGE_NPBACKEND=" + str("+python" in spec),
"-DNO_PYTHON3=ON", # Only build python version we provide
]
diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py
index 360d72c585..933b9e2264 100644
--- a/var/spack/repos/builtin/packages/boost/package.py
+++ b/var/spack/repos/builtin/packages/boost/package.py
@@ -26,6 +26,7 @@ class Boost(Package):
maintainers = ['hainest']
version('develop', branch='develop', submodules=True)
+ version('1.78.0', sha256='8681f175d4bdb26c52222665793eef08490d7758529330f98d3b29dd0735bccc')
version('1.77.0', sha256='fc9f85fc030e233142908241af7a846e60630aa7388de9a5fafb1f3a26840854')
version('1.76.0', sha256='f0397ba6e982c4450f27bf32a2a83292aba035b827a5623a14636ea583318c41')
version('1.75.0', sha256='953db31e016db7bb207f11432bef7df100516eeb746843fa0486a222e3fd49cb')
@@ -333,6 +334,7 @@ class Boost(Package):
def determine_toolset(self, spec):
toolsets = {'g++': 'gcc',
'icpc': 'intel',
+ 'icpx': 'intel',
'clang++': 'clang',
'armclang++': 'clang',
'xlc++': 'xlcpp',
@@ -343,6 +345,8 @@ class Boost(Package):
if spec.satisfies('@1.47:'):
toolsets['icpc'] += '-linux'
+ toolsets['icpx'] += '-linux'
+
for cc, toolset in toolsets.items():
if cc in self.compiler.cxx_names:
return toolset
@@ -383,14 +387,8 @@ class Boost(Package):
with open('user-config.jam', 'w') as f:
# Boost may end up using gcc even though clang+gfortran is set in
# compilers.yaml. Make sure this does not happen:
- if not spec.satisfies('%intel'):
- # using intel-linux : : spack_cxx in user-config.jam leads to
- # error: at project-config.jam:12
- # error: duplicate initialization of intel-linux with the following parameters: # noqa
- # error: version = <unspecified>
- # error: previous initialization at ./user-config.jam:1
- f.write("using {0} : : {1} ;\n".format(boost_toolset_id,
- spack_cxx))
+ f.write("using {0} : : {1} ;\n".format(boost_toolset_id,
+ spack_cxx))
if '+mpi' in spec:
# Use the correct mpi compiler. If the compiler options are
@@ -572,6 +570,11 @@ class Boost(Package):
bootstrap(*bootstrap_options)
+ # strip the toolchain to avoid double include errors (intel) or
+ # user-config being overwritten (again intel, but different boost version)
+ filter_file(r'^\s*using {0}.*'.format(self.determine_toolset(spec)), '',
+ os.path.join(self.stage.source_path, 'project-config.jam'))
+
# b2 used to be called bjam, before 1.47 (sigh)
b2name = './b2' if spec.satisfies('@1.47:') else './bjam'
diff --git a/var/spack/repos/builtin/packages/bridger/package.py b/var/spack/repos/builtin/packages/bridger/package.py
index 769debf525..d9b676ef81 100644
--- a/var/spack/repos/builtin/packages/bridger/package.py
+++ b/var/spack/repos/builtin/packages/bridger/package.py
@@ -20,6 +20,14 @@ class Bridger(MakefilePackage, SourceforgePackage):
depends_on('boost')
depends_on('perl', type='run')
+ def flag_handler(self, name, flags):
+ if name == 'cflags':
+ # some of the plugins require gnu extensions
+ flags.append('-std=gnu99')
+ if name == 'cxxflags':
+ flags.append('-std=c++03')
+ return (flags, None, None)
+
def install(self, spec, prefix):
# bridger depends very much on perl scripts/etc in the source tree
install_path = join_path(prefix, 'usr/local/bridger')
diff --git a/var/spack/repos/builtin/packages/bufr/package.py b/var/spack/repos/builtin/packages/bufr/package.py
index 8550e5c4a2..9c665fa2bc 100644
--- a/var/spack/repos/builtin/packages/bufr/package.py
+++ b/var/spack/repos/builtin/packages/bufr/package.py
@@ -21,3 +21,25 @@ class Bufr(CMakePackage):
'jbathegit']
version('11.5.0', sha256='d154839e29ef1fe82e58cf20232e9f8a4f0610f0e8b6a394b7ca052e58f97f43')
+
+ def _setup_bufr_environment(self, env, suffix):
+        libname = 'libbufr_{0}'.format(suffix)
+ lib = find_libraries(libname, root=self.prefix,
+ shared=False, recursive=True)
+ lib_envname = 'BUFR_LIB{0}'.format(suffix)
+ inc_envname = 'BUFR_INC{0}'.format(suffix)
+ include_dir = 'include_{0}'.format(suffix)
+
+ env.set(lib_envname, lib[0])
+ env.set(inc_envname, include_dir)
+
+ # Bufr has _DA (dynamic allocation) libs in versions <= 11.5.0
+ if self.spec.satisfies('@:11.5.0'):
+ da_lib = find_libraries(libname + "_DA", root=self.prefix,
+ shared=False, recursive=True)
+ env.set(lib_envname + '_DA', da_lib[0])
+ env.set(inc_envname + '_DA', include_dir)
+
+ def setup_run_environment(self, env):
+ for suffix in ('4', '8', 'd'):
+ self._setup_bufr_environment(env, suffix)
diff --git a/var/spack/repos/builtin/packages/c-blosc/gcc.patch b/var/spack/repos/builtin/packages/c-blosc/gcc.patch
index 8390b01e3d..8390b01e3d 100755..100644
--- a/var/spack/repos/builtin/packages/c-blosc/gcc.patch
+++ b/var/spack/repos/builtin/packages/c-blosc/gcc.patch
diff --git a/var/spack/repos/builtin/packages/camellia/package.py b/var/spack/repos/builtin/packages/camellia/package.py
index 241c0add79..714baff980 100644
--- a/var/spack/repos/builtin/packages/camellia/package.py
+++ b/var/spack/repos/builtin/packages/camellia/package.py
@@ -20,7 +20,7 @@ class Camellia(CMakePackage):
variant('moab', default=True, description='Compile with MOAB to include support for reading standard mesh formats')
- depends_on('trilinos+amesos+amesos2+belos+epetra+epetraext+exodus+ifpack+ifpack2+intrepid+intrepid2+kokkos+ml+muelu+sacado+shards+tpetra+zoltan+mumps+superlu-dist+hdf5+zlib+mpi@master,12.12.1:')
+ depends_on('trilinos+amesos+amesos2+belos+epetra+epetraext+exodus+ifpack+ifpack2+intrepid+intrepid2+kokkos+ml+muelu+sacado+shards+tpetra+zoltan+mumps+superlu-dist+hdf5+mpi@master,12.12.1:')
depends_on('moab@:4', when='+moab')
# Cameilla needs hdf5 but the description "hdf5@:1.8" is
diff --git a/var/spack/repos/builtin/packages/cbtf-argonavis/package.py b/var/spack/repos/builtin/packages/cbtf-argonavis/package.py
index 887322330a..dc692704de 100644
--- a/var/spack/repos/builtin/packages/cbtf-argonavis/package.py
+++ b/var/spack/repos/builtin/packages/cbtf-argonavis/package.py
@@ -18,8 +18,6 @@ class CbtfArgonavis(CMakePackage):
version('1.9.4', branch='1.9.4')
version('1.9.3', branch='1.9.3')
- variant('cti', default=False,
- description="Build MRNet with the CTI startup option")
variant('crayfe', default=False,
description="build only the FE tool using the runtime_dir \
to point to target build.")
@@ -41,19 +39,13 @@ class CbtfArgonavis(CMakePackage):
depends_on("boost@1.70.0:")
# For MRNet
- depends_on("mrnet@5.0.1-3:+cti", when='@develop+cti', type=('build', 'link', 'run'))
- depends_on("mrnet@5.0.1-3:+lwthreads", when='@develop~cti', type=('build', 'link', 'run'))
- depends_on("mrnet@5.0.1-3+cti", when='@1.9.3:9999+cti', type=('build', 'link', 'run'))
- depends_on("mrnet@5.0.1-3+lwthreads", when='@1.9.3:9999~cti', type=('build', 'link', 'run'))
+ depends_on("mrnet@5.0.1-3:+lwthreads", when='@develop', type=('build', 'link', 'run'))
+ depends_on("mrnet@5.0.1-3+lwthreads", when='@1.9.3:9999', type=('build', 'link', 'run'))
# For CBTF
depends_on("cbtf@develop", when='@develop', type=('build', 'link', 'run'))
depends_on("cbtf@1.9.3:9999", when='@1.9.3:9999', type=('build', 'link', 'run'))
- # For CBTF with cti
- depends_on("cbtf@develop+cti", when='@develop+cti', type=('build', 'link', 'run'))
- depends_on("cbtf@1.9.3:9999+cti", when='@1.9.3:9999+cti', type=('build', 'link', 'run'))
-
# For CBTF with runtime
depends_on("cbtf@develop+runtime", when='@develop+runtime', type=('build', 'link', 'run'))
depends_on("cbtf@1.9.3:9999+runtime", when='@1.9.3:9999+runtime', type=('build', 'link', 'run'))
@@ -68,9 +60,6 @@ class CbtfArgonavis(CMakePackage):
depends_on("cbtf-krell@develop", when='@develop', type=('build', 'link', 'run'))
depends_on("cbtf-krell@1.9.3:9999", when='@1.9.3:9999', type=('build', 'link', 'run'))
- depends_on('cbtf-krell@develop+cti', when='@develop+cti', type=('build', 'link', 'run'))
- depends_on('cbtf-krell@1.9.3:9999+cti', when='@1.9.3:9999+cti', type=('build', 'link', 'run'))
-
depends_on('cbtf-krell@develop+runtime', when='@develop+runtime', type=('build', 'link', 'run'))
depends_on('cbtf-krell@1.9.3:9999+runtime', when='@1.9.3:9999+runtime', type=('build', 'link', 'run'))
diff --git a/var/spack/repos/builtin/packages/cbtf-krell/package.py b/var/spack/repos/builtin/packages/cbtf-krell/package.py
index 3f464d8953..98ff820300 100644
--- a/var/spack/repos/builtin/packages/cbtf-krell/package.py
+++ b/var/spack/repos/builtin/packages/cbtf-krell/package.py
@@ -40,8 +40,6 @@ class CbtfKrell(CMakePackage):
variant('build_type', default='RelWithDebInfo',
description='The build type to build',
values=('Debug', 'Release', 'RelWithDebInfo'))
- variant('cti', default=False,
- description="Build MRNet with the CTI startup option")
variant('crayfe', default=False,
description="build only the FE tool using the runtime_dir \
to point to target build.")
@@ -66,10 +64,7 @@ class CbtfKrell(CMakePackage):
depends_on("dyninst@10.1.0", when='@1.9.3:9999')
# For MRNet
- depends_on("mrnet@5.0.1-3:+cti", when='@develop+cti', type=('build', 'link', 'run'))
depends_on("mrnet@5.0.1-3:+lwthreads", when='@develop', type=('build', 'link', 'run'))
-
- depends_on("mrnet@5.0.1-3+cti", when='@1.9.3:9999+cti', type=('build', 'link', 'run'))
depends_on("mrnet@5.0.1-3+lwthreads", when='@1.9.3:9999', type=('build', 'link', 'run'))
# For Xerces-C
@@ -79,10 +74,6 @@ class CbtfKrell(CMakePackage):
depends_on("cbtf@develop", when='@develop', type=('build', 'link', 'run'))
depends_on("cbtf@1.9.3:9999", when='@1.9.3:9999', type=('build', 'link', 'run'))
- # For CBTF with cti
- depends_on("cbtf@develop+cti", when='@develop+cti', type=('build', 'link', 'run'))
- depends_on("cbtf@1.9.3:9999+cti", when='@1.9.3:9999+cti', type=('build', 'link', 'run'))
-
# For CBTF with runtime
depends_on("cbtf@develop+runtime", when='@develop+runtime', type=('build', 'link', 'run'))
depends_on("cbtf@1.9.3:9999+runtime", when='@1.9.3:9999+runtime', type=('build', 'link', 'run'))
diff --git a/var/spack/repos/builtin/packages/cbtf-lanl/package.py b/var/spack/repos/builtin/packages/cbtf-lanl/package.py
index 67ae6fe7c6..c260f86498 100644
--- a/var/spack/repos/builtin/packages/cbtf-lanl/package.py
+++ b/var/spack/repos/builtin/packages/cbtf-lanl/package.py
@@ -25,15 +25,10 @@ class CbtfLanl(CMakePackage):
variant('runtime', default=False,
description="build only the runtime libraries and collectors.")
- variant('cti', default=False,
- description="Build MRNet with the CTI startup option")
-
depends_on("cmake@3.0.2:", type='build')
# For MRNet
- depends_on("mrnet@5.0.1-3:+cti", when='@develop+cti')
depends_on("mrnet@5.0.1-3:+lwthreads", when='@develop')
- depends_on("mrnet@5.0.1-3+cti", when='@1.9.3:9999+cti')
depends_on("mrnet@5.0.1-3+lwthreads", when='@1.9.3:9999')
# For Xerces-C
@@ -43,10 +38,6 @@ class CbtfLanl(CMakePackage):
depends_on("cbtf@develop", when='@develop')
depends_on("cbtf@1.9.3:9999", when='@1.9.3:9999')
- # For CBTF with cti
- depends_on("cbtf@develop+cti", when='@develop+cti')
- depends_on("cbtf@1.9.3:9999+cti", when='@1.9.3:9999+cti')
-
# For CBTF with runtime
depends_on("cbtf@develop+runtime", when='@develop+runtime')
depends_on("cbtf@1.9.3:9999+runtime", when='@1.9.3:9999+runtime')
@@ -55,9 +46,6 @@ class CbtfLanl(CMakePackage):
depends_on("cbtf-krell@develop", when='@develop')
depends_on("cbtf-krell@1.9.3:9999", when='@1.9.3:9999')
- depends_on('cbtf-krell@develop+cti', when='@develop+cti')
- depends_on('cbtf-krell@1.9.3:9999+cti', when='@1.9.3:9999+cti')
-
depends_on('cbtf-krell@develop+runtime', when='@develop+runtime')
depends_on('cbtf-krell@1.9.3:9999+runtime', when='@1.9.3:9999+runtime')
diff --git a/var/spack/repos/builtin/packages/cbtf/package.py b/var/spack/repos/builtin/packages/cbtf/package.py
index dea5d055d3..080b74ceba 100644
--- a/var/spack/repos/builtin/packages/cbtf/package.py
+++ b/var/spack/repos/builtin/packages/cbtf/package.py
@@ -21,9 +21,6 @@ class Cbtf(CMakePackage):
version('1.9.4', branch='1.9.4')
version('1.9.3', branch='1.9.3')
- variant('cti', default=False,
- description="Build MRNet with the CTI startup option")
-
variant('runtime', default=False,
description="build only the runtime libraries and collectors.")
@@ -42,9 +39,7 @@ class Cbtf(CMakePackage):
depends_on("boost@1.70.0:")
# For MRNet
- depends_on("mrnet@5.0.1-3:+cti", when='@develop+cti')
depends_on("mrnet@5.0.1-3:+lwthreads", when='@develop')
- depends_on("mrnet@5.0.1-3+cti", when='@1.9.3:9999+cti')
depends_on("mrnet@5.0.1-3+lwthreads", when='@1.9.3:9999')
# For Xerces-C
diff --git a/var/spack/repos/builtin/packages/ccache/package.py b/var/spack/repos/builtin/packages/ccache/package.py
index 1278c73e2d..c99903b1f7 100644
--- a/var/spack/repos/builtin/packages/ccache/package.py
+++ b/var/spack/repos/builtin/packages/ccache/package.py
@@ -47,6 +47,9 @@ class Ccache(CMakePackage):
conflicts('%gcc@:5', when='@4.4:')
conflicts('%clang@:4', when='@4.4:')
+ def cmake_args(self):
+ return [self.define('ENABLE_TESTING', False)]
+
# Before 4.0 this was an Autotools package
@when('@:3')
def cmake(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/cctools/cctools_6.1.1.python.patch b/var/spack/repos/builtin/packages/cctools/cctools_6.1.1.python.patch
index 726e216e59..726e216e59 100755..100644
--- a/var/spack/repos/builtin/packages/cctools/cctools_6.1.1.python.patch
+++ b/var/spack/repos/builtin/packages/cctools/cctools_6.1.1.python.patch
diff --git a/var/spack/repos/builtin/packages/cctools/cctools_7.0.18.python.patch b/var/spack/repos/builtin/packages/cctools/cctools_7.0.18.python.patch
index 1c8dfc428e..1c8dfc428e 100755..100644
--- a/var/spack/repos/builtin/packages/cctools/cctools_7.0.18.python.patch
+++ b/var/spack/repos/builtin/packages/cctools/cctools_7.0.18.python.patch
diff --git a/var/spack/repos/builtin/packages/cdo/package.py b/var/spack/repos/builtin/packages/cdo/package.py
index 8de2088d7e..f1fb3dede8 100644
--- a/var/spack/repos/builtin/packages/cdo/package.py
+++ b/var/spack/repos/builtin/packages/cdo/package.py
@@ -20,6 +20,9 @@ class Cdo(AutotoolsPackage):
maintainers = ['skosukhin', 'Try2Code']
+ version('2.0.2', sha256='34dfdd0d4126cfd35fc69e37e60901c8622d13ec5b3fa5f0fe6a1cc866cc5a70', url='https://code.mpimet.mpg.de/attachments/download/26654/cdo-2.0.2.tar.gz')
+ version('2.0.1', sha256='d0794d261e22efa0adac8e6d18de2b60d54de5e1a4df6127c65fc417feb8fdac', url='https://code.mpimet.mpg.de/attachments/download/26477/cdo-2.0.1.tar.gz')
+ version('2.0.0', sha256='6bca54e9d69d8c1f072f1996547b7347a65743d15ba751967e9bb16e0ff7a843', url='https://code.mpimet.mpg.de/attachments/download/26370/cdo-2.0.0.tar.gz')
version('1.9.10', sha256='cc39c89bbb481d7b3945a06c56a8492047235f46ac363c4f0d980fccdde6677e', url='https://code.mpimet.mpg.de/attachments/download/24638/cdo-1.9.10.tar.gz')
version('1.9.9', sha256='959b5b58f495d521a7fd1daa84644888ec87d6a0df43f22ad950d17aee5ba98d', url='https://code.mpimet.mpg.de/attachments/download/23323/cdo-1.9.9.tar.gz')
version('1.9.8', sha256='f2660ac6f8bf3fa071cf2a3a196b3ec75ad007deb3a782455e80f28680c5252a', url='https://code.mpimet.mpg.de/attachments/download/20826/cdo-1.9.8.tar.gz')
diff --git a/var/spack/repos/builtin/packages/cgdb/package.py b/var/spack/repos/builtin/packages/cgdb/package.py
index 8c3c2e4ec1..d5d920d926 100644
--- a/var/spack/repos/builtin/packages/cgdb/package.py
+++ b/var/spack/repos/builtin/packages/cgdb/package.py
@@ -9,18 +9,32 @@ from spack import *
class Cgdb(AutotoolsPackage):
"""A curses front-end to GDB"""
- homepage = "https://cgdb.github.io"
- url = "https://cgdb.me/files/cgdb-0.7.0.tar.gz"
+ maintainers = ['tuxfan']
+ homepage = 'https://cgdb.github.io'
+ url = 'https://cgdb.me/files/cgdb-0.7.1.tar.gz'
+ git = 'https://github.com/cgdb/cgdb.git'
+    version('master', branch='master', submodules=False, preferred=True)
+ version('0.7.1', sha256='bb723be58ec68cb59a598b8e24a31d10ef31e0e9c277a4de07b2f457fe7de198')
version('0.7.0', sha256='bf7a9264668db3f9342591b08b2cc3bbb08e235ba2372877b4650b70c6fb5423')
# Required dependency
+ depends_on('gdb', type='run')
depends_on('ncurses')
depends_on('readline')
+ depends_on('autoconf', type='build', when='@master')
+ depends_on('automake', type='build', when='@master')
+ depends_on('libtool', type='build', when='@master')
+ depends_on('m4', type='build', when='@master')
depends_on('flex', type='build')
depends_on('bison', type='build')
depends_on('texinfo', type='build')
+ @when('@master')
+ def autoreconf(self, spec, prefix):
+ sh = which('sh')
+ sh('autogen.sh')
+
def configure_args(self):
spec = self.spec
diff --git a/var/spack/repos/builtin/packages/charmpp/fj.patch b/var/spack/repos/builtin/packages/charmpp/fj.patch
index d5df680c0c..d5df680c0c 100755..100644
--- a/var/spack/repos/builtin/packages/charmpp/fj.patch
+++ b/var/spack/repos/builtin/packages/charmpp/fj.patch
diff --git a/var/spack/repos/builtin/packages/clhep/package.py b/var/spack/repos/builtin/packages/clhep/package.py
index 65b04a35c7..9717ca69f0 100644
--- a/var/spack/repos/builtin/packages/clhep/package.py
+++ b/var/spack/repos/builtin/packages/clhep/package.py
@@ -18,6 +18,7 @@ class Clhep(CMakePackage):
maintainers = ['drbenmorgan']
+ version('2.4.5.1', sha256='2517c9b344ad9f55974786ae6e7a0ef8b22f4abcbf506df91194ea2299ce3813')
version('2.4.4.0', sha256='5df78c11733a091da9ae5a24ce31161d44034dd45f20455587db85f1ca1ba539')
version('2.4.1.3', sha256='27c257934929f4cb1643aa60aeaad6519025d8f0a1c199bc3137ad7368245913')
version('2.4.1.2', sha256='ff96e7282254164380460bc8cf2dff2b58944084eadcd872b5661eb5a33fa4b8')
diff --git a/var/spack/repos/builtin/packages/clingo/package.py b/var/spack/repos/builtin/packages/clingo/package.py
index 6832fd65cf..e4eb15c5e0 100644
--- a/var/spack/repos/builtin/packages/clingo/package.py
+++ b/var/spack/repos/builtin/packages/clingo/package.py
@@ -26,6 +26,7 @@ class Clingo(CMakePackage):
version('master', branch='master', submodules=True)
version('spack', commit='2a025667090d71b2c9dce60fe924feb6bde8f667', submodules=True)
+ version('5.5.1', sha256='b9cf2ba2001f8241b8b1d369b6f353e628582e2a00f13566e51c03c4dd61f67e')
version('5.5.0', sha256='c9d7004a0caec61b636ad1c1960fbf339ef8fdee9719321fc1b6b210613a8499')
version('5.4.1', sha256='ac6606388abfe2482167ce8fd4eb0737ef6abeeb35a9d3ac3016c6f715bfee02')
version('5.4.0', sha256='e2de331ee0a6d254193aab5995338a621372517adcf91568092be8ac511c18f3')
diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py
index d7fde8d65f..b4f052a4f8 100644
--- a/var/spack/repos/builtin/packages/cmake/package.py
+++ b/var/spack/repos/builtin/packages/cmake/package.py
@@ -22,6 +22,8 @@ class Cmake(Package):
executables = ['^cmake$']
version('master', branch='master')
+ version('3.22.1', sha256='0e998229549d7b3f368703d20e248e7ee1f853910d42704aa87918c213ea82c0')
+ version('3.22.0', sha256='998c7ba34778d2dfdb3df8a695469e24b11e2bfa21fbe41b361a3f45e1c9345e')
version('3.21.4', sha256='d9570a95c215f4c9886dd0f0564ca4ef8d18c30750f157238ea12669c2985978')
version('3.21.3', sha256='d14d06df4265134ee42c4d50f5a60cb8b471b7b6a47da8e5d914d49dd783794f')
version('3.21.2', sha256='94275e0b61c84bb42710f5320a23c6dcb2c6ee032ae7d2a616f53f68b3d21659')
diff --git a/var/spack/repos/builtin/packages/cni-plugins/package.py b/var/spack/repos/builtin/packages/cni-plugins/package.py
new file mode 100644
index 0000000000..f02cdae953
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cni-plugins/package.py
@@ -0,0 +1,26 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class CniPlugins(Package):
+ """Standard networking plugins for container networking"""
+
+ homepage = 'https://github.com/containernetworking/plugins'
+ url = 'https://github.com/containernetworking/plugins/archive/v1.0.1.tar.gz'
+ maintainers = ['bernhardkaindl']
+
+ version('1.0.1', sha256='2ba3cd9f341a7190885b60d363f6f23c6d20d975a7a0ab579dd516f8c6117619')
+
+ depends_on('go', type='build')
+
+ def install(self, spec, prefix):
+ utils = 'github.com/containernetworking/plugins/pkg/utils/buildversion'
+ which('./build_linux.sh')(
+ '-ldflags',
+ '-extldflags -static -X {0}.BuildVersion={1}'.format(utils, self.version),
+ )
+ install_tree('bin', prefix.bin)
diff --git a/var/spack/repos/builtin/packages/conmon/package.py b/var/spack/repos/builtin/packages/conmon/package.py
new file mode 100644
index 0000000000..73589b3914
--- /dev/null
+++ b/var/spack/repos/builtin/packages/conmon/package.py
@@ -0,0 +1,25 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Conmon(MakefilePackage):
+ """An OCI container runtime monitor"""
+
+ homepage = 'https://github.com/containers/conmon'
+ url = 'https://github.com/containers/conmon/archive/v2.0.30.tar.gz'
+ maintainers = ['bernhardkaindl']
+
+ version('2.0.30', sha256='4b0a98fbe8a63c42f60edac25c19aa6606caa7b1e4fe7846fc7f7de0b566ba25')
+
+ depends_on('go', type='build')
+ depends_on('go-md2man', type='build')
+ depends_on('pkgconfig', type='build')
+ depends_on('libseccomp')
+ depends_on('glib')
+
+ def install(self, spec, prefix):
+ make('install', 'PREFIX=' + prefix)
diff --git a/var/spack/repos/builtin/packages/cosign/package.py b/var/spack/repos/builtin/packages/cosign/package.py
new file mode 100644
index 0000000000..2ebc71b5bc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cosign/package.py
@@ -0,0 +1,34 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import os
+
+from spack import *
+
+
+class Cosign(Package):
+ """
+ Cosign is a go package for container Signing, verification and storage
+ in an OCI registry.
+ """
+
+ homepage = "https://github.com/sigstore/cosign"
+ url = "https://github.com/sigstore/cosign/archive/refs/tags/v1.3.1.tar.gz"
+ git = "https://github.com/sigstore/cosign.git"
+
+ version('main', branch='main')
+ version('1.3.1', sha256='7f7e0af52ee8d795440e66dcc1a7a25783e22d30935f4f957779628b348f38af')
+
+ depends_on("go", type='build')
+
+ def setup_build_environment(self, env):
+ # Point GOPATH at the top of the staging dir for the build step.
+ env.prepend_path('GOPATH', self.stage.path)
+
+ def install(self, spec, prefix):
+ go = which("go")
+ go("build", "-o", "cosign", os.path.join("cmd", "cosign", "main.go"))
+ mkdirp(prefix.bin)
+ install("cosign", prefix.bin)
diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py
index d1ac5b086e..18ec324a7e 100644
--- a/var/spack/repos/builtin/packages/cp2k/package.py
+++ b/var/spack/repos/builtin/packages/cp2k/package.py
@@ -53,14 +53,15 @@ class Cp2k(MakefilePackage, CudaPackage):
' and BQB compression'))
variant('spglib', default=False, description='Enable support for spglib')
- variant('cuda_arch_35_k20x', default=False,
- description=('CP2K (resp. DBCSR) has specific parameter sets for'
- ' different GPU models. Enable this when building'
- ' with cuda_arch=35 for a K20x instead of a K40'))
- variant('cuda_fft', default=False,
- description=('Use CUDA also for FFTs in the PW part of CP2K'))
- variant('cuda_blas', default=False,
- description=('Use CUBLAS for general matrix operations in DBCSR'))
+ with when('+cuda'):
+ variant('cuda_arch_35_k20x', default=False,
+ description=('CP2K (resp. DBCSR) has specific parameter sets for'
+ ' different GPU models. Enable this when building'
+ ' with cuda_arch=35 for a K20x instead of a K40'))
+ variant('cuda_fft', default=False,
+ description=('Use CUDA also for FFTs in the PW part of CP2K'))
+ variant('cuda_blas', default=False,
+ description=('Use CUBLAS for general matrix operations in DBCSR'))
HFX_LMAX_RANGE = range(4, 8)
@@ -167,8 +168,6 @@ class Cp2k(MakefilePackage, CudaPackage):
depends_on('python@3.6:', when='@7:+cuda', type='build')
depends_on('spglib', when='+spglib')
- conflicts('~cuda', '+cuda_fft')
- conflicts('~cuda', '+cuda_blas')
# Apparently cp2k@4.1 needs an "experimental" version of libwannier.a
# which is only available contacting the developer directly. See INSTALL
@@ -487,11 +486,11 @@ class Cp2k(MakefilePackage, CudaPackage):
# Currently AOCC support only static libraries of ELPA
if '%aocc' in spec:
- libs.append(join_path(elpa.prefix.lib,
+ libs.append(join_path(elpa.libs.directories[0],
('libelpa{elpa_suffix}.a'
.format(elpa_suffix=elpa_suffix))))
else:
- libs.append(join_path(elpa.prefix.lib,
+ libs.append(join_path(elpa.libs.directories[0],
('libelpa{elpa_suffix}.{dso_suffix}'
.format(elpa_suffix=elpa_suffix,
dso_suffix=dso_suffix))))
@@ -662,7 +661,7 @@ class Cp2k(MakefilePackage, CudaPackage):
]
def build(self, spec, prefix):
- if len(spec.variants['cuda_arch'].value) > 1:
+ if '+cuda' in spec and len(spec.variants['cuda_arch'].value) > 1:
raise InstallError("cp2k supports only one cuda_arch at a time")
# Apparently the Makefile bases its paths on PWD
diff --git a/var/spack/repos/builtin/packages/cpio/package.py b/var/spack/repos/builtin/packages/cpio/package.py
index 90f195bc90..0134fb1b66 100644
--- a/var/spack/repos/builtin/packages/cpio/package.py
+++ b/var/spack/repos/builtin/packages/cpio/package.py
@@ -9,8 +9,8 @@ from spack import *
class Cpio(AutotoolsPackage, GNUMirrorPackage):
- """GNU cpio copies files into or out of a cpio or tar archive. The
- archive can be another file on the disk, a magnetic tape, or a pipe.
+ """GNU cpio copies files into or out of a cpio or tar archive and the file system.
+ The archive can be another file on the disk, a magnetic tape, or a pipe.
"""
homepage = "https://www.gnu.org/software/cpio/"
gnu_mirror_path = "cpio/cpio-2.13.tar.gz"
@@ -19,10 +19,12 @@ class Cpio(AutotoolsPackage, GNUMirrorPackage):
version('2.13', sha256='e87470d9c984317f658567c03bfefb6b0c829ff17dbf6b0de48d71a4c8f3db88')
- patch('https://src.fedoraproject.org/rpms/cpio/raw/dfe64c466d3ea2c8dfbd99700d9006f610064167/f/cpio-2.13-mutiple-definition.patch', sha256='d22633c368b8aedf4c08b23b6fbaa81a52404c8943ab04926404083ac10f1a4b', when='%gcc@10:')
-
build_directory = 'spack-build'
+ def patch(self):
+        """Fix multiple definition of char *program_name for gcc@10: and clang"""
+ filter_file(r'char \*program_name;', '', 'src/global.c')
+
@classmethod
def determine_version(cls, exe):
output = Executable(exe)('--version', output=str, error=str)
diff --git a/var/spack/repos/builtin/packages/cray-libsci/package.py b/var/spack/repos/builtin/packages/cray-libsci/package.py
index bb21ecdfef..b5e00e7071 100755..100644
--- a/var/spack/repos/builtin/packages/cray-libsci/package.py
+++ b/var/spack/repos/builtin/packages/cray-libsci/package.py
@@ -12,6 +12,7 @@ class CrayLibsci(Package):
homepage = "https://docs.nersc.gov/development/libraries/libsci/"
has_code = False # Skip attempts to fetch source that is not available
+ version("21.08.1.2")
version("20.06.1")
version("20.03.1")
version("19.06.1")
diff --git a/var/spack/repos/builtin/packages/cuda/package.py b/var/spack/repos/builtin/packages/cuda/package.py
index 70f259df2a..d11547f506 100644
--- a/var/spack/repos/builtin/packages/cuda/package.py
+++ b/var/spack/repos/builtin/packages/cuda/package.py
@@ -25,6 +25,10 @@ from spack import *
# format returned by platform.system() and 'arch' by platform.machine()
_versions = {
+ '11.5.1': {
+ 'Linux-aarch64': ('73e1d0e97c7fa686efe7e00fb1e5f179372c4eec8e14d4f44ab58d5f6cf57f63', 'https://developer.download.nvidia.com/compute/cuda/11.5.1/local_installers/cuda_11.5.1_495.29.05_linux_sbsa.run'),
+ 'Linux-x86_64': ('60bea2fc0fac95574015f865355afbf599422ec2c85554f5f052b292711a4bca', 'https://developer.download.nvidia.com/compute/cuda/11.5.1/local_installers/cuda_11.5.1_495.29.05_linux.run'),
+ 'Linux-ppc64le': ('9e0e494d945634fe8ad3e12d7b91806aa4220ed27487bb211030d651b27c67a9', 'https://developer.download.nvidia.com/compute/cuda/11.5.1/local_installers/cuda_11.5.1_495.29.05_linux_ppc64le.run')},
'11.5.0': {
'Linux-aarch64': ('6ea9d520cc956cc751a5ac54f4acc39109627f4e614dd0b1a82cc86f2aa7d8c4', 'https://developer.download.nvidia.com/compute/cuda/11.5.0/local_installers/cuda_11.5.0_495.29.05_linux_sbsa.run'),
'Linux-x86_64': ('ae0a1693d9497cf3d81e6948943e3794636900db71c98d58eefdacaf7f1a1e4c', 'https://developer.download.nvidia.com/compute/cuda/11.5.0/local_installers/cuda_11.5.0_495.29.05_linux.run'),
diff --git a/var/spack/repos/builtin/packages/curl/package.py b/var/spack/repos/builtin/packages/curl/package.py
index 198d3b29ad..5f3f26d898 100644
--- a/var/spack/repos/builtin/packages/curl/package.py
+++ b/var/spack/repos/builtin/packages/curl/package.py
@@ -16,6 +16,8 @@ class Curl(AutotoolsPackage):
# URL must remain http:// so Spack can bootstrap curl
url = "http://curl.haxx.se/download/curl-7.78.0.tar.bz2"
+ version('7.80.0', sha256='dd0d150e49cd950aff35e16b628edf04927f0289df42883750cf952bb858189c')
+ version('7.79.1', sha256='de62c4ab9a9316393962e8b94777a570bb9f71feb580fb4475e412f2f9387851')
version('7.79.0', sha256='d607a677f473f79f96c964100327125a6204a39d835dc00dab7fc0129b959f42')
version('7.78.0', sha256='98530b317dc95ccb324bbe4f834f07bb642fbc393b794ddf3434f246a71ea44a')
version('7.77.0', sha256='6c0c28868cb82593859fc43b9c8fdb769314c855c05cf1b56b023acf855df8ea')
diff --git a/var/spack/repos/builtin/packages/cyrus-sasl/package.py b/var/spack/repos/builtin/packages/cyrus-sasl/package.py
index 355180ab79..930de4aa7c 100644
--- a/var/spack/repos/builtin/packages/cyrus-sasl/package.py
+++ b/var/spack/repos/builtin/packages/cyrus-sasl/package.py
@@ -24,3 +24,4 @@ class CyrusSasl(AutotoolsPackage):
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
+ depends_on('groff', type='build')
diff --git a/var/spack/repos/builtin/packages/czmq/package.py b/var/spack/repos/builtin/packages/czmq/package.py
index c09987df73..67a3b71c9b 100644
--- a/var/spack/repos/builtin/packages/czmq/package.py
+++ b/var/spack/repos/builtin/packages/czmq/package.py
@@ -19,6 +19,8 @@ class Czmq(AutotoolsPackage):
depends_on('automake', type='build')
depends_on('autoconf', type='build')
depends_on('pkgconfig', type='build')
+ depends_on('docbook-xml', type='build')
+ depends_on('docbook-xsl', type='build')
depends_on('uuid')
depends_on('libzmq')
diff --git a/var/spack/repos/builtin/packages/dbus/package.py b/var/spack/repos/builtin/packages/dbus/package.py
index 4efbc8ddd5..0681b1cfa6 100644
--- a/var/spack/repos/builtin/packages/dbus/package.py
+++ b/var/spack/repos/builtin/packages/dbus/package.py
@@ -28,6 +28,8 @@ class Dbus(Package):
version('1.8.2', sha256='5689f7411165adc953f37974e276a3028db94447c76e8dd92efe910c6d3bae08')
depends_on('pkgconfig', type='build')
+ depends_on('docbook-xml@4.4', type='build')
+ depends_on('docbook-xsl', type='build')
depends_on('expat')
depends_on('glib')
depends_on('libsm')
diff --git a/var/spack/repos/builtin/packages/dd4hep/package.py b/var/spack/repos/builtin/packages/dd4hep/package.py
index c85f936de7..7e6d940281 100644
--- a/var/spack/repos/builtin/packages/dd4hep/package.py
+++ b/var/spack/repos/builtin/packages/dd4hep/package.py
@@ -24,21 +24,25 @@ class Dd4hep(CMakePackage):
tags = ['hep']
version('master', branch='master')
+ version('1.19', sha256='d2eccf5e8402ba7dab2e1d7236e12ee4db9b1c5e4253c40a140bf35580db1d9b')
version('1.18', sha256='1e909a42b969dfd966224fa8ab1eca5aa05136baf3c00a140f2f6d812b497152')
version('1.17', sha256='036a9908aaf1e13eaf5f2f43b6f5f4a8bdda8183ddc5befa77a4448dbb485826')
version('1.16.1', sha256='c8b1312aa88283986f89cc008d317b3476027fd146fdb586f9f1fbbb47763f1a')
- version('1.16', sha256='ea9755cd255cf1b058e0e3cd743101ca9ca5ff79f4c60be89f9ba72b1ae5ec69')
- version('1.15', sha256='992a24bd4b3dfaffecec9d1c09e8cde2c7f89d38756879a47b23208242f4e352')
- version('1.14.1', sha256='5b5742f1e23c2b36d3174cca95f810ce909c0eb66f3d6d7acb0ba657819e6717')
- version('1.14', sha256='b603aa3c0db8dda392253aa71fa4a0f0c3c9715d47df0b895d45c1e8849f4895')
- version('1.13.1', sha256='83fa70cd74ce93b2f52f098388dff58d179f05ace5b50aea3f408bb8abf7cb73')
- version('1.13', sha256='0b1f9d902ebe21a9178c1e41204c066b29f68c8836fd1d03a9ce979811ddb295')
- version('1.12.1', sha256='85e8c775ec03c499ce10911e228342e757c81ce9ef2a9195cb253b85175a2e93')
- version('1.12', sha256='133a1fb8ce0466d2482f3ebb03e60b3bebb9b2d3e33d14ba15c8fbb91706b398')
- version('1.11.2', sha256='96a53dd26cb8df11c6dae54669fbc9cc3c90dd47c67e07b24be9a1341c95abc4')
- version('1.11.1', sha256='d7902dd7f6744bbda92f6e303ad5a3410eec4a0d2195cdc86f6c1167e72893f0')
- version('1.11', sha256='25643296f15f9d11ad4ad550b7c3b92e8974fc56f1ee8e4455501010789ae7b6')
- version('1.10', sha256='1d6b5d1c368dc8bcedd9c61b7c7e1a44bad427f8bd34932516aff47c88a31d95')
+ # versions older than 1.16.1 are no longer supported
+ # (they need several patches like https://github.com/AIDASoft/DD4hep/pull/796)
+ version('1.16', sha256='ea9755cd255cf1b058e0e3cd743101ca9ca5ff79f4c60be89f9ba72b1ae5ec69', deprecated=True)
+ version('1.15', sha256='992a24bd4b3dfaffecec9d1c09e8cde2c7f89d38756879a47b23208242f4e352', deprecated=True)
+ version('1.14.1', sha256='5b5742f1e23c2b36d3174cca95f810ce909c0eb66f3d6d7acb0ba657819e6717', deprecated=True)
+ version('1.14', sha256='b603aa3c0db8dda392253aa71fa4a0f0c3c9715d47df0b895d45c1e8849f4895', deprecated=True)
+ version('1.13.1', sha256='83fa70cd74ce93b2f52f098388dff58d179f05ace5b50aea3f408bb8abf7cb73', deprecated=True)
+ version('1.13', sha256='0b1f9d902ebe21a9178c1e41204c066b29f68c8836fd1d03a9ce979811ddb295', deprecated=True)
+ version('1.12.1', sha256='85e8c775ec03c499ce10911e228342e757c81ce9ef2a9195cb253b85175a2e93', deprecated=True)
+    # these versions won't build with +ddcad as the subpackage doesn't exist yet
+ version('1.12', sha256='133a1fb8ce0466d2482f3ebb03e60b3bebb9b2d3e33d14ba15c8fbb91706b398', deprecated=True)
+ version('1.11.2', sha256='96a53dd26cb8df11c6dae54669fbc9cc3c90dd47c67e07b24be9a1341c95abc4', deprecated=True)
+ version('1.11.1', sha256='d7902dd7f6744bbda92f6e303ad5a3410eec4a0d2195cdc86f6c1167e72893f0', deprecated=True)
+ version('1.11', sha256='25643296f15f9d11ad4ad550b7c3b92e8974fc56f1ee8e4455501010789ae7b6', deprecated=True)
+ version('1.10', sha256='1d6b5d1c368dc8bcedd9c61b7c7e1a44bad427f8bd34932516aff47c88a31d95', deprecated=True)
generator = 'Ninja'
@@ -50,13 +54,24 @@ class Dd4hep(CMakePackage):
# See https://github.com/spack/spack/issues/24232
patch('cmake_language.patch', when='@:1.17')
+ # variants for subpackages
+ variant('ddcad', default=True, description="Enable CAD interface based on Assimp")
+ variant('ddg4', default=True, description="Enable the simulation part based on Geant4")
+ variant('ddrec', default=True, description="Build DDRec subpackage.")
+ variant('dddetectors', default=True, description="Build DDDetectors subpackage.")
+ variant('ddcond', default=True, description="Build DDCond subpackage.")
+ variant('ddalign', default=True, description="Build DDAlign subpackage.")
+ variant('dddigi', default=True, description="Build DDDigi subpackage.")
+ variant('ddeve', default=True, description="Build DDEve subpackage.")
+ variant('utilityapps', default=True, description='Build UtilityApps subpackage.')
+
+ # variants for other build options
variant('xercesc', default=False, description="Enable 'Detector Builders' based on XercesC")
- variant('geant4', default=False, description="Enable the simulation part based on Geant4")
- variant('assimp', default=False, description="Enable CAD interface based on Assimp")
variant('hepmc3', default=False, description="Enable build with hepmc3")
variant('lcio', default=False, description="Enable build with lcio")
variant('edm4hep', default=True, description="Enable build with edm4hep")
variant('geant4units', default=False, description="Use geant4 units throughout")
+ variant('tbb', default=False, description="Enable build with tbb")
variant('debug', default=False,
description="Enable debug build flag - adds extra info in"
" some places in addtion to the debug build type")
@@ -64,12 +79,14 @@ class Dd4hep(CMakePackage):
depends_on('cmake @3.12:', type='build')
depends_on('ninja', type='build')
depends_on('boost @1.49:')
- depends_on('root @6.08: +gdml +math +opengl +python +x')
+ depends_on('root @6.08: +gdml +math +python')
+ depends_on('root @6.08: +gdml +math +python +x +opengl', when="+ddeve")
extends('python')
depends_on('xerces-c', when='+xercesc')
- depends_on('geant4@10.2.2:', when='+geant4')
- depends_on('assimp@5.0.2:', when='+assimp')
+ depends_on('geant4@10.2.2:', when='+ddg4')
+ depends_on('assimp@5.0.2:', when='+ddcad')
depends_on('hepmc3', when="+hepmc3")
+ depends_on('intel-tbb', when='+tbb')
depends_on('lcio', when="+lcio")
depends_on('edm4hep', when="+edm4hep")
depends_on('py-pytest', type="test")
@@ -77,6 +94,7 @@ class Dd4hep(CMakePackage):
# See https://github.com/AIDASoft/DD4hep/pull/771
conflicts('^cmake@3.16:3.17.0', when='@1.15',
msg='cmake version with buggy FindPython breaks dd4hep cmake config')
+ conflicts('~ddrec+dddetectors', msg="Need to enable +ddrec to build +dddetectors.")
def cmake_args(self):
spec = self.spec
@@ -87,7 +105,8 @@ class Dd4hep(CMakePackage):
args = [
self.define_from_variant('DD4HEP_USE_EDM4HEP', 'edm4hep'),
self.define_from_variant('DD4HEP_USE_XERCESC', 'xercesc'),
- self.define_from_variant('DD4HEP_USE_GEANT4', 'geant4'),
+ self.define_from_variant('DD4HEP_USE_TBB', 'tbb'),
+ self.define_from_variant('DD4HEP_USE_GEANT4', 'ddg4'),
self.define_from_variant('DD4HEP_USE_LCIO', 'lcio'),
self.define_from_variant('DD4HEP_USE_HEPMC3', 'hepmc3'),
self.define_from_variant('DD4HEP_USE_GEANT4_UNITS', 'geant4units'),
@@ -102,6 +121,27 @@ class Dd4hep(CMakePackage):
"-DBoost_NO_BOOST_CMAKE=ON",
"-DPYTHON_EXECUTABLE={0}".format(spec['python'].command.path),
]
+ subpackages = []
+ if spec.satisfies('+ddg4'):
+ subpackages += ['DDG4']
+ if spec.satisfies('+ddcond'):
+ subpackages += ['DDCond']
+ if spec.satisfies('+ddcad'):
+ subpackages += ['DDCAD']
+ if spec.satisfies('+ddrec'):
+ subpackages += ['DDRec']
+ if spec.satisfies('+dddetectors'):
+ subpackages += ['DDDetectors']
+ if spec.satisfies('+ddalign'):
+ subpackages += ['DDAlign']
+ if spec.satisfies('+dddigi'):
+ subpackages += ['DDDigi']
+ if spec.satisfies('+ddeve'):
+ subpackages += ['DDEve']
+ if spec.satisfies('+utilityapps'):
+ subpackages += ['UtilityApps']
+ subpackages = ' '.join(subpackages)
+ args += [self.define('DD4HEP_BUILD_PACKAGES', subpackages)]
return args
def setup_run_environment(self, env):
diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py
index 17f5cbd75b..9c288912e2 100644
--- a/var/spack/repos/builtin/packages/dealii/package.py
+++ b/var/spack/repos/builtin/packages/dealii/package.py
@@ -25,6 +25,7 @@ class Dealii(CMakePackage, CudaPackage):
generator = 'Ninja'
version('master', branch='master')
+ version('9.3.2', sha256='5341d76bfd75d3402fc6907a875513efb5fe8a8b99af688d94443c492d5713e8')
version('9.3.1', sha256='a62f4676ab2dc029892251d141427fb75cbb83cddd606019f615d0dde9c61ab8')
version('9.3.0', sha256='aef8c7a87510ce827dfae3bdd4ed7bff82004dc09f96fa7a65b2554f2839b931')
version('9.2.0', sha256='d05a82fb40f1f1e24407451814b5a6004e39366a44c81208b1ae9d65f3efa43a')
diff --git a/var/spack/repos/builtin/packages/delly2/package.py b/var/spack/repos/builtin/packages/delly2/package.py
index a674d4e17d..3690a086b9 100644
--- a/var/spack/repos/builtin/packages/delly2/package.py
+++ b/var/spack/repos/builtin/packages/delly2/package.py
@@ -15,31 +15,43 @@ class Delly2(MakefilePackage):
homepage = "https://github.com/dellytools/delly"
git = "https://github.com/dellytools/delly.git"
- version('2017-08-03', commit='e32a9cd55c7e3df5a6ae4a91f31a0deb354529fc')
+ version('0.9.1', tag='v0.9.1')
+ version('2017-08-03', commit='e32a9cd55c7e3df5a6ae4a91f31a0deb354529fc', deprecated=True)
- depends_on('htslib')
- depends_on('boost')
- depends_on('bcftools')
+ variant('openmp', default=False, description='Build with openmp support')
+
+ depends_on('htslib', type=('build', 'link'))
+ depends_on('boost', type=('build', 'link'))
+ depends_on('bcftools', type='run')
def edit(self, spec, prefix):
+ if '+openmp' in self.spec:
+ env['PARALLEL'] = '1'
# Only want to build delly source, not submodules. Build fails
# using provided submodules, succeeds with existing spack recipes.
- makefile = FileFilter('Makefile')
- makefile.filter('HTSLIBSOURCES =', '#HTSLIBSOURCES')
- makefile.filter('BOOSTSOURCES =', '#BOOSTSOURCES')
- makefile.filter('SEQTK_ROOT ?=', '#SEQTK_ROOT')
- makefile.filter('BOOST_ROOT ?=', '#BOOST_ROOT')
- makefile.filter('cd src', '# cd src')
- makefile.filter('.htslib ', '')
- makefile.filter('.bcftools ', '')
- makefile.filter('.boost ', '')
- makefile.filter('.htslib:', '# .htslib:')
- makefile.filter('.bcftools:', '# .bcftools:')
- makefile.filter('.boost:', '# .boost:')
+ if self.spec.satisfies('@2017-08-03'):
+ makefile = FileFilter('Makefile')
+ makefile.filter('HTSLIBSOURCES =', '#HTSLIBSOURCES')
+ makefile.filter('BOOSTSOURCES =', '#BOOSTSOURCES')
+ makefile.filter('SEQTK_ROOT ?=', '#SEQTK_ROOT')
+ makefile.filter('BOOST_ROOT ?=', '#BOOST_ROOT')
+ makefile.filter('cd src', '# cd src')
+ makefile.filter('.htslib ', '')
+ makefile.filter('.bcftools ', '')
+ makefile.filter('.boost ', '')
+ makefile.filter('.htslib:', '# .htslib:')
+ makefile.filter('.bcftools:', '# .bcftools:')
+ makefile.filter('.boost:', '# .boost:')
+ else:
+ env['EBROOTHTSLIB'] = self.spec['htslib'].prefix
+ filter_file('BUILT_PROGRAMS =.*$',
+ 'BUILT_PROGRAMS = src/delly src/dpe', 'Makefile')
+ filter_file('${SUBMODULES}', '', 'Makefile', string=True)
def install(self, spec, prefix):
mkdirp(prefix.bin)
with working_dir('src'):
install('delly', prefix.bin)
install('dpe', prefix.bin)
- install('cov', prefix.bin)
+ if self.spec.satisfies('@2017-08-03'):
+ install('cov', prefix.bin)
diff --git a/var/spack/repos/builtin/packages/dpcpp/package.py b/var/spack/repos/builtin/packages/dpcpp/package.py
new file mode 100644
index 0000000000..6e67f4a390
--- /dev/null
+++ b/var/spack/repos/builtin/packages/dpcpp/package.py
@@ -0,0 +1,157 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+import os
+
+from spack import *
+
+
+class Dpcpp(CMakePackage):
+ """Data Parallel C++ compiler: Intel's implementation of SYCL programming model"""
+
+ homepage = 'https://intel.github.io/llvm-docs/'
+ git = 'https://github.com/intel/llvm.git'
+
+ version('develop', branch='sycl')
+ version('2021.09', commit='bd68232bb96386bf7649345c0557ba520e73c02d')
+
+ maintainers = ['ravil-mobile']
+ variant('cuda', default=False, description='switch from OpenCL to CUDA')
+ variant('rocm', default=False, description='switch from OpenCL to ROCm')
+ variant('rocm-platform', default='AMD', values=('AMD', 'NVIDIA'), multi=False, description='choose ROCm backend')
+ variant('openmp', default=False, description='build with OpenMP without target offloading')
+ variant('esimd-cpu', default=False, description='build with ESIMD_CPU support')
+ variant('assertions', default=False, description='build with assertions')
+ variant('docs', default=False, description='build Doxygen documentation')
+ variant('werror', default=False, description='treat warnings as errors')
+ variant('shared', default=False, description='build shared libraries')
+ variant('remangle_libclc', default=True, description='remangle libclc gen. variants')
+ variant('lld', default=False, description='use LLD linker for build')
+
+ depends_on('cmake@3.16.2:', type='build')
+ depends_on('ninja@1.10.0:', type='build')
+
+ depends_on('cuda@10.2.0:11.4.999', when='+cuda')
+
+ # NOTE: AMD HIP needs to be tested; it will be done in the next update
+ # depends_on('cuda@10.2.0:10.2.999', when='rocm-platform=NVIDIA', type='build')
+ # depends_on('hip@4.0.0:', when='+rocm', type='build')
+
+ root_cmakelists_dir = 'llvm'
+
+ def cmake_args(self):
+ llvm_external_projects = 'sycl;llvm-spirv;opencl;libdevice;xpti;xptifw'
+
+ if '+openmp' in self.spec:
+ llvm_external_projects += ';openmp'
+
+ sycl_dir = os.path.join(self.stage.source_path, 'sycl')
+ spirv_dir = os.path.join(self.stage.source_path, 'llvm-spirv')
+ xpti_dir = os.path.join(self.stage.source_path, 'xpti')
+ xptifw_dir = os.path.join(self.stage.source_path, 'xptifw')
+ libdevice_dir = os.path.join(self.stage.source_path, 'libdevice')
+ llvm_enable_projects = 'clang;' + llvm_external_projects
+ libclc_targets_to_build = ''
+ sycl_build_pi_rocm_platform = self.spec.variants['rocm-platform'].value
+
+ if self.spec.satisfies('target=x86_64:'):
+ llvm_targets_to_build = 'X86'
+ elif self.spec.satisfies('target=aarch64:'):
+ llvm_targets_to_build = 'ARM;AArch64'
+ else:
+ raise InstallError('target is not supported. '
+ 'This package only works on x86_64 or aarch64')
+
+ is_cuda = '+cuda' in self.spec
+ is_rocm = '+rocm' in self.spec
+
+ if is_cuda or is_rocm:
+ llvm_enable_projects += ';libclc'
+
+ if is_cuda:
+ llvm_targets_to_build += ';NVPTX'
+ libclc_targets_to_build = 'nvptx64--;nvptx64--nvidiacl'
+
+ if is_rocm:
+ if sycl_build_pi_rocm_platform == 'AMD':
+ llvm_targets_to_build += ';AMDGPU'
+ libclc_targets_to_build += ';amdgcn--;amdgcn--amdhsa'
+ elif sycl_build_pi_rocm_platform and not is_cuda:
+ llvm_targets_to_build += ';NVPTX'
+ libclc_targets_to_build += ';nvptx64--;nvptx64--nvidiacl'
+
+ args = [
+ self.define_from_variant('LLVM_ENABLE_ASSERTIONS', 'assertions'),
+ self.define('LLVM_TARGETS_TO_BUILD', llvm_targets_to_build),
+ self.define('LLVM_EXTERNAL_PROJECTS', llvm_external_projects),
+ self.define('LLVM_EXTERNAL_SYCL_SOURCE_DIR', sycl_dir),
+ self.define('LLVM_EXTERNAL_LLVM_SPIRV_SOURCE_DIR', spirv_dir),
+ self.define('LLVM_EXTERNAL_XPTI_SOURCE_DIR', xpti_dir),
+ self.define('XPTI_SOURCE_DIR', xpti_dir),
+ self.define('LLVM_EXTERNAL_XPTIFW_SOURCE_DIR', xptifw_dir),
+ self.define('LLVM_EXTERNAL_LIBDEVICE_SOURCE_DIR', libdevice_dir),
+ self.define('LLVM_ENABLE_PROJECTS', llvm_enable_projects),
+ self.define('LIBCLC_TARGETS_TO_BUILD', libclc_targets_to_build),
+ self.define_from_variant('SYCL_BUILD_PI_CUDA', 'cuda'),
+ self.define_from_variant('SYCL_BUILD_PI_ROCM', 'rocm'),
+ self.define('SYCL_BUILD_PI_ROCM_PLATFORM', sycl_build_pi_rocm_platform),
+ self.define('LLVM_BUILD_TOOLS', True),
+ self.define_from_variant('SYCL_ENABLE_WERROR', 'werror'),
+ self.define('SYCL_INCLUDE_TESTS', True),
+ self.define_from_variant('LIBCLC_GENERATE_REMANGLED_VARIANTS',
+ 'remangle_libclc'),
+ self.define_from_variant('LLVM_ENABLE_DOXYGEN', 'docs'),
+ self.define_from_variant('LLVM_ENABLE_SPHINX', 'docs'),
+ self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
+ self.define('SYCL_ENABLE_XPTI_TRACING', 'ON'),
+ self.define_from_variant('LLVM_ENABLE_LLD', 'lld'),
+ self.define_from_variant('SYCL_BUILD_PI_ESIMD_CPU', 'esimd-cpu'),
+ ]
+
+ if is_cuda or (is_rocm and sycl_build_pi_rocm_platform == 'NVIDIA'):
+ args.append(
+ self.define('CUDA_TOOLKIT_ROOT_DIR', self.spec['cuda'].prefix)
+ )
+
+ if '+openmp' in self.spec:
+ omp_dir = os.path.join(self.stage.source_path, 'openmp')
+ args.extend([
+ self.define('LLVM_EXTERNAL_OPENMP_SOURCE_DIR', omp_dir),
+ self.define('OPENMP_ENABLE_LIBOMPTARGET', False),
+ ])
+
+ if self.compiler.name == 'gcc':
+ gcc_prefix = ancestor(self.compiler.cc, 2)
+ args.append(self.define('GCC_INSTALL_PREFIX', gcc_prefix))
+
+ return args
+
+ def setup_build_environment(self, env):
+ if '+cuda' in self.spec:
+ env.set('CUDA_LIB_PATH', '{0}/lib64/stubs'.format(self.spec['cuda'].prefix))
+
+ @run_after("install")
+ def post_install(self):
+ clang_cpp_path = os.path.join(self.spec.prefix.bin, 'clang++')
+ dpcpp_path = os.path.join(self.spec.prefix.bin, 'dpcpp')
+
+ real_clang_cpp_path = os.path.realpath(clang_cpp_path)
+ os.symlink(real_clang_cpp_path, dpcpp_path)
+
+ def setup_run_environment(self, env):
+ bin_path = self.spec.prefix.bin
+ for env_var_name, compiler in zip(['CC', 'CXX'], ['clang', 'clang++']):
+ env.set(env_var_name, os.path.join(bin_path, compiler))
+
+ include_env_vars = ['C_INCLUDE_PATH', 'CPLUS_INCLUDE_PATH', 'INCLUDE']
+ for var in include_env_vars:
+ env.prepend_path(var, self.prefix.include)
+ env.prepend_path(var, self.prefix.include.sycl)
+
+ sycl_build_pi_rocm_platform = self.spec.variants['rocm-platform'].value
+ if '+cuda' in self.spec or sycl_build_pi_rocm_platform == 'NVIDIA':
+ env.prepend_path('PATH', self.spec['cuda'].prefix.bin)
+ env.set('CUDA_TOOLKIT_ROOT_DIR', self.spec['cuda'].prefix)
diff --git a/var/spack/repos/builtin/packages/dsfmt/package.py b/var/spack/repos/builtin/packages/dsfmt/package.py
new file mode 100644
index 0000000000..83a449d6a4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/dsfmt/package.py
@@ -0,0 +1,32 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Dsfmt(MakefilePackage):
+ """Double precision SIMD-oriented Fast Mersenne Twister"""
+
+ homepage = "http://www.math.sci.hiroshima-u.ac.jp/m-mat/MT/SFMT/"
+ url = "https://github.com/MersenneTwister-Lab/dSFMT/archive/v2.2.4.tar.gz"
+
+ maintainers = ['haampie']
+
+ # This package does not have a target to build a library nor a make install target,
+    # so we add them here.
+ patch('targets.patch')
+
+ version('2.2.5', sha256='b7bc498cd140b4808963b1ff9f33b42a491870f54775c1060ecad0e02bcaffb4')
+ version('2.2.4', sha256='39682961ecfba621a98dbb6610b6ae2b7d6add450d4f08d8d4edd0e10abd8174')
+
+ @property
+ def libs(self):
+ return find_libraries('libdSFMT', root=self.prefix, recursive=True)
+
+ def build(self, spec, prefix):
+ make('build-library')
+
+ def install(self, spec, prefix):
+ make('PREFIX={0}'.format(prefix), 'install')
diff --git a/var/spack/repos/builtin/packages/dsfmt/targets.patch b/var/spack/repos/builtin/packages/dsfmt/targets.patch
new file mode 100644
index 0000000000..8d4664aa39
--- /dev/null
+++ b/var/spack/repos/builtin/packages/dsfmt/targets.patch
@@ -0,0 +1,25 @@
+diff --git a/Makefile b/Makefile
+index 5cc4ed6..adc09c5 100644
+--- a/Makefile
++++ b/Makefile
+@@ -196,3 +196,20 @@ test-sse2-M216091: test.c dSFMT.c dSFMT.h
+
+ clean:
+ rm -f *.o *~ test-*-M*
++
++ifeq ($(OS), WINNT)
++ SHLIB_EXT := dll
++else ifeq ($(OS), Darwin)
++ SHLIB_EXT := dylib
++else
++ SHLIB_EXT := so
++endif
++
++build-library: dSFMT.c
++ $(CC) $(CCFLAGS) -shared -fPIC -DDSFMT_MEXP=19937 -DDSFMT_DO_NOT_USE_OLD_NAMES -DDSFMT_SHLIB $< -o libdSFMT.$(SHLIB_EXT)
++ echo 1 > $@
++
++install: build-library
++ install -d $(PREFIX)/lib/
++ install -m 644 libdSFMT.so $(PREFIX)/lib/
++
diff --git a/var/spack/repos/builtin/packages/dust/package.py b/var/spack/repos/builtin/packages/dust/package.py
new file mode 100644
index 0000000000..6835d1917e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/dust/package.py
@@ -0,0 +1,51 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Dust(Package):
+ """du + rust = dust. Like du but more intuitive."""
+
+ homepage = "https://github.com/bootandy/dust"
+ url = "https://github.com/bootandy/dust/archive/v0.7.5.tar.gz"
+
+ maintainers = ["fangohr"]
+
+ version(
+ "0.7.5",
+ sha256="f892aaf7a0a7852e12d01b2ced6c2484fb6dc5fe7562abdf0c44a2d08aa52618",
+ )
+
+ depends_on("rust")
+
+ sanity_check_is_file = [join_path("bin", "dust")]
+
+ def install(self, spec, prefix):
+ cargo = which("cargo")
+ cargo("install", "--root", prefix, "--path", ".")
+
+ @run_after("install")
+ def check_install(self):
+ print("Attempt to call 'dust' with '--version'")
+ dust = Executable(join_path(self.spec["dust"].prefix.bin, "dust"))
+ output = dust(
+ "--version",
+ output=str.split,
+ )
+ print("stdout received fromm dust is '{}".format(output))
+ assert "Dust " in output
+
+ def test(self):
+ """Run this smoke test when requested explicitly"""
+
+ dustpath = join_path(self.spec["dust"].prefix.bin, "dust")
+ options = ["--version"]
+ purpose = "Check dust can execute (with option '--version')"
+ expected = ["Dust "]
+
+ self.run_test(
+ dustpath, options=options, expected=expected, status=[0], purpose=purpose
+ )
diff --git a/var/spack/repos/builtin/packages/dyninst/package.py b/var/spack/repos/builtin/packages/dyninst/package.py
index 3288a7df56..6bff2233ac 100644
--- a/var/spack/repos/builtin/packages/dyninst/package.py
+++ b/var/spack/repos/builtin/packages/dyninst/package.py
@@ -19,6 +19,8 @@ class Dyninst(CMakePackage):
tags = ['e4s']
version('master', branch='master')
+ version('12.0.1', tag='v12.0.1')
+ version('12.0.0', tag='v12.0.0')
version('11.0.1', tag='v11.0.1')
version('11.0.0', tag='v11.0.0')
version('10.2.1', tag='v10.2.1')
@@ -53,6 +55,7 @@ class Dyninst(CMakePackage):
# before that.
# NB: Parallel DWARF parsing in Dyninst 10.2.0 requires a thread-
# safe libdw
+ depends_on('elfutils@0.186:', type='link', when='@12.0.1:')
depends_on('elfutils@0.178:', type='link', when='@10.2.0:')
depends_on('elfutils', type='link', when='@9.3.0:10.1')
depends_on('libelf', type='link', when='@:9.2')
@@ -106,6 +109,7 @@ class Dyninst(CMakePackage):
'-DElfUtils_ROOT_DIR=%s' % spec['elf'].prefix,
'-DLibIberty_ROOT_DIR=%s' % spec['libiberty'].prefix,
'-DTBB_ROOT_DIR=%s' % spec['tbb'].prefix,
+ self.define('LibIberty_LIBRARIES', spec['libiberty'].libs)
]
if '+openmp' in spec:
diff --git a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py
index 5b29673f28..ee6c7a6a28 100644
--- a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py
+++ b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py
@@ -30,6 +30,7 @@ class EcpDataVisSdk(BundlePackage, CudaPackage):
variant('veloc', default=False, description="Enable VeloC")
# Vis
+ variant('sensei', default=False, description="Enable Sensei")
variant('ascent', default=False, description="Enable Ascent")
variant('paraview', default=False, description="Enable ParaView")
variant('sz', default=False, description="Enable SZ")
@@ -40,8 +41,6 @@ class EcpDataVisSdk(BundlePackage, CudaPackage):
variant('cinema', default=False, description="Enable Cinema")
# Outstanding build issues
- variant('catalyst', default=False, description="Enable Catalyst")
- conflicts('+catalyst')
variant('visit', default=False, description="Enable VisIt")
conflicts('+visit')
@@ -109,15 +108,19 @@ class EcpDataVisSdk(BundlePackage, CudaPackage):
dav_sdk_depends_on('veloc', when='+veloc')
+    # Currently only develop has necessary patches. Update this after SC21 release
+ propagate_to_sensei = [(v, v) for v in ['adios2', 'ascent', 'hdf5', 'vtkm']]
+ propagate_to_sensei.extend([('paraview', 'catalyst'), ('visit', 'libsim')])
+ dav_sdk_depends_on('sensei@develop +vtkio +python ~miniapps', when='+sensei',
+ propagate=dict(propagate_to_sensei))
+
dav_sdk_depends_on('ascent+shared+mpi+fortran+openmp+python+vtkh+dray',
when='+ascent')
- dav_sdk_depends_on('catalyst', when='+catalyst')
-
depends_on('py-cinemasci', when='+cinema')
# +adios2 is not yet enabled in the paraview package
- paraview_base_spec = 'paraview+mpi+python3+kits'
+ paraview_base_spec = 'paraview +mpi +python3 +kits'
# Want +shared when not using cuda
dav_sdk_depends_on(paraview_base_spec + '+shared ~cuda',
when='+paraview ~cuda',
diff --git a/var/spack/repos/builtin/packages/edm4hep/package.py b/var/spack/repos/builtin/packages/edm4hep/package.py
index 879decf40b..cfa8c142bc 100644
--- a/var/spack/repos/builtin/packages/edm4hep/package.py
+++ b/var/spack/repos/builtin/packages/edm4hep/package.py
@@ -16,6 +16,7 @@ class Edm4hep(CMakePackage):
tags = ["hep", "key4hep"]
version('master', branch='master')
+ version('0.4', sha256='bcb729cd4a6f5917b8f073364fc950788111e178dd16b7e5218361f459c92a24')
version('0.3.2', sha256='b6a28649a4ba9ec1c4423bd1397b0a810ca97374305c4856186b506e4c00f769')
version('0.3.1', sha256='eeec38fe7d72d2a72f07a63dca0a34ca7203727f67869c0abf6bef014b8b319b')
version('0.3', sha256='d0ad8a486c3ed1659ea97d47b268fe56718fdb389b5935f23ba93804e4d5fbc5')
@@ -32,9 +33,11 @@ class Edm4hep(CMakePackage):
depends_on('python', type='build')
depends_on('root@6.08:')
- depends_on('podio@0.13:')
+ depends_on('podio@0.14:', when='@0.4:')
+ depends_on('podio@0.13.0:0.13', when='@:0.3')
- depends_on('hepmc@:2', type='test')
+ depends_on('hepmc@:2', type='test', when='@:0.4.0')
+ depends_on('hepmc3', type='test', when='@0.4.1:')
depends_on('heppdt', type='test')
depends_on('catch2@3.0.1:', type='test')
diff --git a/var/spack/repos/builtin/packages/elpa/package.py b/var/spack/repos/builtin/packages/elpa/package.py
index 001f9c21f4..70aa378f86 100644
--- a/var/spack/repos/builtin/packages/elpa/package.py
+++ b/var/spack/repos/builtin/packages/elpa/package.py
@@ -14,6 +14,7 @@ class Elpa(AutotoolsPackage, CudaPackage, ROCmPackage):
homepage = 'https://elpa.mpcdf.mpg.de/'
url = 'https://elpa.mpcdf.mpg.de/software/tarball-archive/Releases/2015.11.001/elpa-2015.11.001.tar.gz'
+ version('2021.05.002_bugfix', sha256='deabc48de5b9e4b2f073d749d335c8f354a7ce4245b643a23b7951cd6c90224b')
version('2021.05.001', sha256='a4f1a4e3964f2473a5f8177f2091a9da5c6b5ef9280b8272dfefcbc3aad44d41')
version('2020.05.001', sha256='66ff1cf332ce1c82075dc7b5587ae72511d2bcb3a45322c94af6b01996439ce5')
version('2019.11.001', sha256='10374a8f042e23c7e1094230f7e2993b6f3580908a213dbdf089792d05aff357')
@@ -66,10 +67,17 @@ class Elpa(AutotoolsPackage, CudaPackage, ROCmPackage):
@property
def headers(self):
suffix = '_openmp' if self.spec.satisfies('+openmp') else ''
+
+ # upstream sometimes adds tarball suffixes not part of the internal version
+ elpa_version = str(self.spec.version)
+ for vsuffix in ("_bugfix", ):
+ if elpa_version.endswith(vsuffix): # implementation of py3.9 removesuffix
+ elpa_version = elpa_version[:-len(vsuffix)]
+
incdir = os.path.join(
self.spec.prefix.include,
- 'elpa{suffix}-{version!s}'.format(
- suffix=suffix, version=self.spec.version))
+ 'elpa{suffix}-{version}'.format(
+ suffix=suffix, version=elpa_version))
hlist = find_all_headers(incdir)
hlist.directories = [incdir]
diff --git a/var/spack/repos/builtin/packages/esmf/package.py b/var/spack/repos/builtin/packages/esmf/package.py
index 28ce836bec..e8f3d9ee67 100644
--- a/var/spack/repos/builtin/packages/esmf/package.py
+++ b/var/spack/repos/builtin/packages/esmf/package.py
@@ -18,6 +18,7 @@ class Esmf(MakefilePackage):
homepage = "https://www.earthsystemcog.org/projects/esmf/"
url = 'https://github.com/esmf-org/esmf/archive/ESMF_8_0_1.tar.gz'
+ version('8.2.0', sha256='3693987aba2c8ae8af67a0e222bea4099a48afe09b8d3d334106f9d7fc311485')
version('8.1.1', sha256='58c2e739356f21a1b32673aa17a713d3c4af9d45d572f4ba9168c357d586dc75')
version('8.0.1', sha256='9172fb73f3fe95c8188d889ee72fdadb4f978b1d969e1d8e401e8d106def1d84')
version('8.0.0', sha256='051dca45f9803d7e415c0ea146df15ce487fb55f0fce18ca61d96d4dba0c8774')
diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py
index 1ffe2d7cbe..599ff5f4d7 100755..100644
--- a/var/spack/repos/builtin/packages/exago/package.py
+++ b/var/spack/repos/builtin/packages/exago/package.py
@@ -13,10 +13,15 @@ class Exago(CMakePackage, CudaPackage):
homepage = 'https://gitlab.pnnl.gov/exasgd/frameworks/exago'
git = 'https://gitlab.pnnl.gov/exasgd/frameworks/exago.git'
-
- version('1.0.0', tag='v1.0.0')
- version('0.99.2', tag='v0.99.2')
- version('0.99.1', tag='v0.99.1')
+ maintainers = ['ashermancinelli', 'CameronRutherford']
+
+ version('1.2.0', commit='255a214e', submodules=True)
+ version('1.1.2', commit='db3bb16e', submodules=True)
+ version('1.1.1', commit='0e0a3f27', submodules=True)
+ version('1.1.0', commit='dc8dd855', submodules=True)
+ version('1.0.0', commit='230d7df2')
+ version('0.99.2', commit='56961641')
+ version('0.99.1', commit='0ae426c7')
version('master', branch='master')
version('develop', branch='develop')
@@ -26,16 +31,19 @@ class Exago(CMakePackage, CudaPackage):
# Solver options
variant('hiop', default=False, description='Enable/Disable HiOp')
- variant('petsc', default=True, description='Enable/Disable PETSc')
variant('ipopt', default=False, description='Enable/Disable IPOPT')
# Dependencides
depends_on('mpi', when='+mpi')
depends_on('blas')
depends_on('cuda', when='+cuda')
+
depends_on('raja', when='+raja')
depends_on('raja+cuda', when='+raja+cuda')
+ depends_on('raja@0.14.0:', when='@1.1.0: +raja')
+
depends_on('umpire', when='+raja')
+ depends_on('umpire@6.0.0:', when='@1.1.0: +raja')
# Some allocator code in Umpire only works with static libs
depends_on('umpire+cuda~shared', when='+raja+cuda')
@@ -43,21 +51,22 @@ class Exago(CMakePackage, CudaPackage):
# For some versions of RAJA package, camp cuda variant does not get set
# correctly, so we must explicitly depend on it even though we don't use
# camp
- depends_on('camp+cuda', when='+cuda')
+ depends_on('camp+cuda', when='+raja+cuda')
depends_on('cmake@3.18:', type='build')
# HiOp dependency logic
- depends_on('hiop+shared', when='+hiop')
depends_on('hiop+raja', when='+hiop+raja')
depends_on('hiop@0.3.99:', when='@0.99:+hiop')
+ depends_on('hiop@0.5.1:', when='@1.1.0:+hiop')
depends_on('hiop+cuda', when='+hiop+cuda')
depends_on('hiop~mpi', when='+hiop~mpi')
depends_on('hiop+mpi', when='+hiop+mpi')
# Require PETSc < 3.15 per ExaGO issue #199
- depends_on('petsc@3.13:3.14', when='+petsc')
- depends_on('petsc~mpi', when='+petsc~mpi')
+ depends_on('petsc@3.13:3.14')
+ depends_on('petsc~mpi', when='~mpi')
+
depends_on('ipopt', when='+ipopt')
flag_handler = build_system_flags
@@ -71,10 +80,10 @@ class Exago(CMakePackage, CudaPackage):
args.append(self.define_from_variant('EXAGO_ENABLE_MPI', 'mpi'))
args.append(self.define_from_variant('EXAGO_ENABLE_RAJA', 'raja'))
args.append(self.define_from_variant('EXAGO_ENABLE_HIOP', 'hiop'))
- args.append(self.define_from_variant('EXAGO_ENABLE_PETSC', 'petsc'))
args.append(self.define_from_variant('EXAGO_ENABLE_IPOPT', 'ipopt'))
args.append(self.define_from_variant('EXAGO_ENABLE_GPU', 'cuda'))
args.append(self.define_from_variant('EXAGO_ENABLE_CUDA', 'cuda'))
+ args.append("-DPETSC_DIR='{0}'".format(spec['petsc'].prefix))
if '+cuda' in spec:
cuda_arch_list = spec.variants['cuda_arch'].value
@@ -83,7 +92,4 @@ class Exago(CMakePackage, CudaPackage):
args.append(
"-DCMAKE_CUDA_ARCHITECTURES={0}".format(cuda_arch))
- if '+petsc' in spec:
- args.append("-DPETSC_DIR='{0}'".format(spec['petsc'].prefix))
-
return args
diff --git a/var/spack/repos/builtin/packages/exaworks/package.py b/var/spack/repos/builtin/packages/exaworks/package.py
index c017d0d04b..c017d0d04b 100755..100644
--- a/var/spack/repos/builtin/packages/exaworks/package.py
+++ b/var/spack/repos/builtin/packages/exaworks/package.py
diff --git a/var/spack/repos/builtin/packages/f3d/package.py b/var/spack/repos/builtin/packages/f3d/package.py
new file mode 100644
index 0000000000..cc06d2cfbc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/f3d/package.py
@@ -0,0 +1,17 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class F3d(CMakePackage):
+ """A fast and minimalist scriptable 3D viewer."""
+
+ homepage = "https://f3d-app.github.io"
+ url = "https://github.com/f3d-app/f3d/archive/refs/tags/v1.1.1.tar.gz"
+
+ version('1.1.1', sha256='68bdbe3a90f2cd553d5e090a95d3c847e2a2f06abbe225ffecd47d3d29978b0a')
+
+ depends_on('vtk@9:', type='link')
diff --git a/var/spack/repos/builtin/packages/fenics-dolfinx/package.py b/var/spack/repos/builtin/packages/fenics-dolfinx/package.py
index 21e9cd9c16..78f1fee879 100644
--- a/var/spack/repos/builtin/packages/fenics-dolfinx/package.py
+++ b/var/spack/repos/builtin/packages/fenics-dolfinx/package.py
@@ -22,6 +22,7 @@ class FenicsDolfinx(CMakePackage):
variant("kahip", default=False, description="kahip support")
variant("parmetis", default=False, description="parmetis support")
variant("slepc", default=False, description="slepc support")
+ variant("adios2", default=False, description="adios2 support")
depends_on("cmake@3.18:", type="build")
depends_on("pkgconfig", type="build")
@@ -36,6 +37,7 @@ class FenicsDolfinx(CMakePackage):
depends_on("kahip", when="+kahip")
depends_on("parmetis", when="+parmetis")
depends_on("slepc", when="+slepc")
+ depends_on("adios2", when="+adios2")
depends_on("py-fenics-ffcx", type=("build", "run"))
depends_on("py-fenics-ffcx@main", type=("build", "run"), when="@main")
@@ -62,6 +64,8 @@ class FenicsDolfinx(CMakePackage):
'ON' if "+parmetis" in self.spec else 'OFF'),
"-DDOLFINX_ENABLE_SLEPC=%s" % (
'ON' if "+slepc" in self.spec else 'OFF'),
+ "-DDOLFINX_ENABLE_ADIOS2=%s" % (
+ 'ON' if "+adios2" in self.spec else 'OFF'),
"-DPython3_ROOT_DIR=%s" % self.spec['python'].home,
"-DPython3_FIND_STRATEGY=LOCATION",
]
diff --git a/var/spack/repos/builtin/packages/ffr/gfortran_format_30.patch b/var/spack/repos/builtin/packages/ffr/gfortran_format_30.patch
index 1669ca5550..1669ca5550 100755..100644
--- a/var/spack/repos/builtin/packages/ffr/gfortran_format_30.patch
+++ b/var/spack/repos/builtin/packages/ffr/gfortran_format_30.patch
diff --git a/var/spack/repos/builtin/packages/ffr/gfortran_format_31.patch b/var/spack/repos/builtin/packages/ffr/gfortran_format_31.patch
index 8ebd66c72c..8ebd66c72c 100755..100644
--- a/var/spack/repos/builtin/packages/ffr/gfortran_format_31.patch
+++ b/var/spack/repos/builtin/packages/ffr/gfortran_format_31.patch
diff --git a/var/spack/repos/builtin/packages/flecsale/package.py b/var/spack/repos/builtin/packages/flecsale/package.py
index eb8166f263..0c91fa7254 100644
--- a/var/spack/repos/builtin/packages/flecsale/package.py
+++ b/var/spack/repos/builtin/packages/flecsale/package.py
@@ -20,8 +20,8 @@ class Flecsale(CMakePackage):
depends_on("pkgconfig", type='build')
depends_on("cmake@3.1:", type='build')
- depends_on("flecsi~mpi", when='~mpi')
- depends_on("flecsi+mpi", when='+mpi')
+ depends_on("flecsi backend=serial", when='~mpi')
+ conflicts("^flecsi backend=serial", when='+mpi')
depends_on("python")
depends_on("openssl")
depends_on("boost~mpi", when='~mpi')
diff --git a/var/spack/repos/builtin/packages/flecsi/package.py b/var/spack/repos/builtin/packages/flecsi/package.py
index 6767c93331..fba678f846 100644
--- a/var/spack/repos/builtin/packages/flecsi/package.py
+++ b/var/spack/repos/builtin/packages/flecsi/package.py
@@ -110,6 +110,8 @@ class Flecsi(CMakePackage, CudaPackage):
depends_on('mpich@3.4.1:', when='@2.0: ^mpich')
depends_on('openmpi@4.1.0:', when='@2.0: ^openmpi')
+ conflicts('%gcc@:8', when='@2.1:')
+
conflicts('+tutorial', when='backend=hpx')
# FleCSI@2: no longer supports serial or charmpp backends
conflicts('backend=serial', when='@2.0:')
@@ -136,7 +138,7 @@ class Flecsi(CMakePackage, CudaPackage):
# Unit tests require flog support
conflicts('+unit_tests', when='~flog')
# Disallow conduit=none when using legion as a backend
- conflicts('legion conduit=none', when='backend=legion')
+ conflicts('^legion conduit=none', when='backend=legion')
# Due to overhauls of Legion and Gasnet spackages
# flecsi@:1.9 can no longer be built with a usable legion
conflicts('backend=legion', when='@:1.9')
diff --git a/var/spack/repos/builtin/packages/flux-core/package.py b/var/spack/repos/builtin/packages/flux-core/package.py
index ffff450594..5423bc0d2f 100644
--- a/var/spack/repos/builtin/packages/flux-core/package.py
+++ b/var/spack/repos/builtin/packages/flux-core/package.py
@@ -19,6 +19,8 @@ class FluxCore(AutotoolsPackage):
maintainers = ['grondo']
version('master', branch='master')
+
+ version('0.31.0', sha256='a18251de2ca3522484cacfa986df934ba8f98c54586e18940ce5d2c6147a8a7f')
version('0.30.0', sha256='e51fde4464140367ae4bc1b44f960675ea0a6f58eede3a561cacd8a11ca3e776')
version('0.29.0', sha256='c13b40e82d66356e75208a689a495ca01f0a013e2e45ac8ea202ed8224987323')
version('0.28.0', sha256='9a784def7186b0036091bd8d6d8fe5bc3425ab2927e1465e1c9ad266631c285d')
diff --git a/var/spack/repos/builtin/packages/flux-sched/no-valgrind.patch b/var/spack/repos/builtin/packages/flux-sched/no-valgrind.patch
new file mode 100644
index 0000000000..3b828e431e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/flux-sched/no-valgrind.patch
@@ -0,0 +1,19 @@
+diff --git a/t/t5000-valgrind.t b/t/t5000-valgrind.t
+index 08255348..aac546f6 100755
+--- a/t/t5000-valgrind.t
++++ b/t/t5000-valgrind.t
+@@ -6,6 +6,14 @@ test_description='Run broker under valgrind with a small workload'
+ test -n "$FLUX_TESTS_LOGFILE" && set -- "$@" --logfile
+ . `dirname $0`/sharness.sh
+
++# Do not run valgrind test by default unless FLUX_ENABLE_VALGRIND_TEST
++# is set in environment (e.g. by CI), or the test is run with -d, --debug
++#
++if test -z "$FLUX_ENABLE_VALGRIND_TEST" && test "$debug" = ""; then
++ skip_all='skipping valgrind tests since FLUX_ENABLE_VALGRIND_TEST not set'
++ test_done
++fi
++
+ if ! which valgrind >/dev/null; then
+ skip_all='skipping valgrind tests since no valgrind executable found'
+ test_done
diff --git a/var/spack/repos/builtin/packages/flux-sched/package.py b/var/spack/repos/builtin/packages/flux-sched/package.py
index 20ad0d714c..de1b370258 100644
--- a/var/spack/repos/builtin/packages/flux-sched/package.py
+++ b/var/spack/repos/builtin/packages/flux-sched/package.py
@@ -19,6 +19,7 @@ class FluxSched(AutotoolsPackage):
maintainers = ['grondo']
version('master', branch='master')
+ version('0.20.0', sha256='1d2074e1458ba1e7a1d4c33341b9f09769559cd1b8c68edc32097e220c4240b8')
version('0.19.0', sha256='8dffa8eaec95a81286f621639ef851c52dc4c562d365971233bbd91100c31ed2')
version('0.18.0', sha256='a4d8a6444fdb7b857b26f47fdea57992b486c9522f4ff92d5a6f547d95b586ae')
version('0.17.0', sha256='5acfcb757e2294a92eaa91be58ba9b42736b88b42d2937de4a78f4642b1c4933')
@@ -42,7 +43,9 @@ class FluxSched(AutotoolsPackage):
depends_on("py-pyyaml")
depends_on("libedit")
depends_on("libxml2@2.9.1:")
- depends_on("yaml-cpp")
+ # pin yaml-cpp to 0.6.3 due to issue #886
+ # https://github.com/flux-framework/flux-sched/issues/886
+ depends_on("yaml-cpp@0.6.3")
depends_on("uuid")
depends_on("pkgconfig")
@@ -55,6 +58,7 @@ class FluxSched(AutotoolsPackage):
depends_on("flux-core@0.28.0:", when='@0.17.0', type=('build', 'run', 'link'))
depends_on("flux-core@0.29.0:", when='@0.18.0', type=('build', 'run', 'link'))
depends_on("flux-core@0.30.0:", when='@0.19.0', type=('build', 'run', 'link'))
+    depends_on("flux-core@0.31.0:", when='@0.20.0', type=('build', 'run', 'link'))
depends_on("flux-core@master", when='@master', type=('build', 'run', 'link'))
# Need autotools when building on master:
@@ -62,6 +66,10 @@ class FluxSched(AutotoolsPackage):
depends_on("automake", type='build', when='@master')
depends_on("libtool", type='build', when='@master')
+ # Disable t5000-valgrind.t by default due to false positives not yet
+ # in the suppressions file. (This patch will be in v0.21.0)
+ patch('no-valgrind.patch', when='@:0.20.0')
+
def url_for_version(self, version):
'''
Flux uses a fork of ZeroMQ's Collective Code Construction Contract
@@ -103,7 +111,7 @@ class FluxSched(AutotoolsPackage):
bash = which('bash')
bash('./autogen.sh')
- @when('@:0.19')
+ @when('@:0.20')
def patch(self):
"""Fix build with clang@13 and gcc@11"""
filter_file('NULL', 'nullptr', 'resource/schema/sched_data.hpp')
diff --git a/var/spack/repos/builtin/packages/fpm/package.py b/var/spack/repos/builtin/packages/fpm/package.py
index 9401bf636c..f6acf27da9 100644
--- a/var/spack/repos/builtin/packages/fpm/package.py
+++ b/var/spack/repos/builtin/packages/fpm/package.py
@@ -21,6 +21,7 @@ class Fpm(Package):
maintainers = ["awvwgk"]
phases = ["install"]
+ version("0.5.0", "e4a06956d2300f9aa1d06bd3323670480e946549617582e32684ded6921a921e")
version("0.4.0", "cd9b80b7f40d9cf357ca8d5d4fe289fd32dfccb729bad7d2a68f245e4cdd0045")
version("0.3.0", "3368d1b17e2d1368559174c796ce0e184cb6bf79c939938c6d166fbd15959fa3")
@@ -32,6 +33,9 @@ class Fpm(Package):
if "@0.4.0" in self.spec:
env.set("FPM_C_COMPILER", self.compiler.cc)
+ if "@0.5.0" in self.spec:
+ env.set("FPM_CC", self.compiler.cc)
+
fflags = "-O3"
if "+openmp" in self.spec:
fflags += " " + self.compiler.openmp_flag
diff --git a/var/spack/repos/builtin/packages/fraggenescan/package.py b/var/spack/repos/builtin/packages/fraggenescan/package.py
index 70bcd32e72..1f60362fb3 100644
--- a/var/spack/repos/builtin/packages/fraggenescan/package.py
+++ b/var/spack/repos/builtin/packages/fraggenescan/package.py
@@ -15,11 +15,14 @@ class Fraggenescan(MakefilePackage):
url = "https://downloads.sourceforge.net/project/fraggenescan/FragGeneScan1.31.tar.gz"
version('1.31', sha256='cd3212d0f148218eb3b17d24fcd1fc897fb9fee9b2c902682edde29f895f426c')
-
- build_targets = ['clean', 'hmm.obj']
+ version('1.30', sha256='f2d7f0dfa4a5f4bbea295ed865dcbfedf16c954ea1534c2a879ebdcfb8650d95')
def edit(self, spec, prefix):
filter_file('gcc', spack_cc, 'Makefile', string=True)
+    def build(self, spec, prefix):
+ make('clean')
+ make('fgs')
+
def install(self, spec, prefix):
install_tree('.', prefix.bin)
diff --git a/var/spack/repos/builtin/packages/fuse-overlayfs/package.py b/var/spack/repos/builtin/packages/fuse-overlayfs/package.py
index 81103c3103..5322527e6d 100644
--- a/var/spack/repos/builtin/packages/fuse-overlayfs/package.py
+++ b/var/spack/repos/builtin/packages/fuse-overlayfs/package.py
@@ -11,7 +11,11 @@ class FuseOverlayfs(AutotoolsPackage):
homepage = "https://github.com/containers/fuse-overlayfs"
url = "https://github.com/containers/fuse-overlayfs/archive/v1.1.2.tar.gz"
+ maintainers = ['haampie']
+ version('1.7.1', sha256='fe2c076aed7b8669e7970301a99c0b197759b611035d8199de4c0add7d2fb2b4')
+ version('1.7', sha256='e4d9a794d270e237a38e7ced95af95ad15268e0584eab981ed7c7b3758b95995')
+ version('1.6', sha256='5606c5c1e4e0d7063f3f50a72c906f23b7a505784d20dc40fff9e4b2b802d5f4')
version('1.5.0', sha256='6c81b65b71067b303aaa9871f512c2cabc23e2b793f19c6c854d01a492b5a923')
version('1.4.0', sha256='7e5666aef4f2047e6a5202d6438b08c2d314dba5b40e431014e7dbb8168d9018')
version('1.3.0', sha256='91e78a93aac7698c65083deea04952bc86af6abbb0830785ef1dd4a8707ad8bf')
diff --git a/var/spack/repos/builtin/packages/fxdiv/package.py b/var/spack/repos/builtin/packages/fxdiv/package.py
index daaaf2a211..ec9800fcae 100644
--- a/var/spack/repos/builtin/packages/fxdiv/package.py
+++ b/var/spack/repos/builtin/packages/fxdiv/package.py
@@ -23,25 +23,8 @@ class Fxdiv(CMakePackage):
generator = 'Ninja'
- resource(
- name='googletest',
- url='https://github.com/google/googletest/archive/release-1.10.0.zip',
- sha256='94c634d499558a76fa649edb13721dce6e98fb1e7018dfaeba3cd7a083945e91',
- destination='deps',
- placement='googletest',
- )
- resource(
- name='googlebenchmark',
- url='https://github.com/google/benchmark/archive/v1.5.0.zip',
- sha256='2d22dd3758afee43842bb504af1a8385cccb3ee1f164824e4837c1c1b04d92a0',
- destination='deps',
- placement='googlebenchmark',
- )
-
def cmake_args(self):
return [
- self.define('GOOGLETEST_SOURCE_DIR',
- join_path(self.stage.source_path, 'deps', 'googletest')),
- self.define('GOOGLEBENCHMARK_SOURCE_DIR',
- join_path(self.stage.source_path, 'deps', 'googlebenchmark')),
+ self.define('FXDIV_BUILD_TESTS', False),
+ self.define('FXDIV_BUILD_BENCHMARKS', False)
]
diff --git a/var/spack/repos/builtin/packages/g2/package.py b/var/spack/repos/builtin/packages/g2/package.py
index 64b8da9dee..40cd8aa5f9 100644
--- a/var/spack/repos/builtin/packages/g2/package.py
+++ b/var/spack/repos/builtin/packages/g2/package.py
@@ -23,3 +23,10 @@ class G2(CMakePackage):
depends_on('jasper')
depends_on('libpng')
+
+ def setup_run_environment(self, env):
+ for suffix in ('4', 'd'):
+ lib = find_libraries('libg2_' + suffix, root=self.prefix,
+ shared=False, recursive=True)
+ env.set('G2_LIB' + suffix, lib[0])
+ env.set('G2_INC' + suffix, join_path(self.prefix, 'include_' + suffix))
diff --git a/var/spack/repos/builtin/packages/g2c/package.py b/var/spack/repos/builtin/packages/g2c/package.py
index 14c5cabfc7..5496f75f16 100644
--- a/var/spack/repos/builtin/packages/g2c/package.py
+++ b/var/spack/repos/builtin/packages/g2c/package.py
@@ -26,3 +26,8 @@ class G2c(CMakePackage):
depends_on('libpng', when='+png')
depends_on('jasper', when='+jasper')
depends_on('openjpeg', when='+openjpeg')
+
+ def setup_run_environment(self, env):
+ lib = find_libraries('libg2c', root=self.prefix, shared=False, recursive=True)
+ env.set('G2C_LIB', lib[0])
+ env.set('G2C_INC', join_path(self.prefix, 'include'))
diff --git a/var/spack/repos/builtin/packages/g4emlow/package.py b/var/spack/repos/builtin/packages/g4emlow/package.py
index 6d3aaa6f22..95a7c65b7a 100644
--- a/var/spack/repos/builtin/packages/g4emlow/package.py
+++ b/var/spack/repos/builtin/packages/g4emlow/package.py
@@ -17,6 +17,7 @@ class G4emlow(Package):
maintainers = ['drbenmorgan']
# Only versions relevant to Geant4 releases built by spack are added
+ version('8.0', sha256='d919a8e5838688257b9248a613910eb2a7633059e030c8b50c0a2c2ad9fd2b3b')
version('7.13', sha256='374896b649be776c6c10fea80abe6cf32f9136df0b6ab7c7236d571d49fb8c69')
version('7.9.1', sha256='820c106e501c64c617df6c9e33a0f0a3822ffad059871930f74b8cc37f043ccb')
version('7.9', sha256='4abf9aa6cda91e4612676ce4d2d8a73b91184533aa66f9aad19a53a8c4dc3aff')
diff --git a/var/spack/repos/builtin/packages/g4particlexs/package.py b/var/spack/repos/builtin/packages/g4particlexs/package.py
index 982d5fdba4..7ccacd8166 100644
--- a/var/spack/repos/builtin/packages/g4particlexs/package.py
+++ b/var/spack/repos/builtin/packages/g4particlexs/package.py
@@ -18,6 +18,7 @@ class G4particlexs(Package):
maintainers = ['drbenmorgan']
# Only versions relevant to Geant4 releases built by spack are added
+ version('4.0', sha256='9381039703c3f2b0fd36ab4999362a2c8b4ff9080c322f90b4e319281133ca95')
version('3.1.1', sha256='66c17edd6cb6967375d0497add84c2201907a25e33db782ebc26051d38f2afda')
version('3.1', sha256='404da84ead165e5cccc0bb795222f6270c9bf491ef4a0fd65195128b27f0e9cd')
version('2.1', sha256='094d103372bbf8780d63a11632397e72d1191dc5027f9adabaf6a43025520b41')
diff --git a/var/spack/repos/builtin/packages/g4tendl/package.py b/var/spack/repos/builtin/packages/g4tendl/package.py
index ff180c102f..2a27e3fa71 100644
--- a/var/spack/repos/builtin/packages/g4tendl/package.py
+++ b/var/spack/repos/builtin/packages/g4tendl/package.py
@@ -17,6 +17,7 @@ class G4tendl(Package):
maintainers = ['drbenmorgan']
# Only versions relevant to Geant4 releases built by spack are added
+ version('1.4', sha256='4b7274020cc8b4ed569b892ef18c2e088edcdb6b66f39d25585ccee25d9721e0')
version('1.3.2', sha256='3b2987c6e3bee74197e3bd39e25e1cc756bb866c26d21a70f647959fc7afb849')
version('1.3', sha256='52ad77515033a5d6f995c699809b464725a0e62099b5e55bf07c8bdd02cd3bce')
diff --git a/var/spack/repos/builtin/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py
index 5520fb0f71..6a83354b89 100644
--- a/var/spack/repos/builtin/packages/gcc/package.py
+++ b/var/spack/repos/builtin/packages/gcc/package.py
@@ -261,7 +261,7 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
# Use -headerpad_max_install_names in the build,
# otherwise updated load commands won't fit in the Mach-O header.
# This is needed because `gcc` avoids the superenv shim.
- patch('darwin/gcc-7.1.0-headerpad.patch', when='@5:')
+ patch('darwin/gcc-7.1.0-headerpad.patch', when='@5:11')
patch('darwin/gcc-6.1.0-jit.patch', when='@5:7')
patch('darwin/gcc-4.9.patch1', when='@4.9.0:4.9.3')
patch('darwin/gcc-4.9.patch2', when='@4.9.0:4.9.3')
diff --git a/var/spack/repos/builtin/packages/gchp/package.py b/var/spack/repos/builtin/packages/gchp/package.py
index 6b5e723536..52e101121b 100644
--- a/var/spack/repos/builtin/packages/gchp/package.py
+++ b/var/spack/repos/builtin/packages/gchp/package.py
@@ -12,10 +12,11 @@ class Gchp(CMakePackage):
"""GEOS-Chem High Performance model of atmospheric chemistry"""
homepage = "https://gchp.readthedocs.io/"
- url = "https://github.com/geoschem/GCHP/archive/13.1.2.tar.gz"
+ url = "https://github.com/geoschem/GCHP/archive/13.2.1.tar.gz"
git = "https://github.com/geoschem/GCHP.git"
- maintainers = ['lizziel']
+ maintainers = ['lizziel', 'laestrada']
+ version('13.2.1', commit='9dc2340cac684971fa961559a4dc3d8818326ab8', submodules=True)
version('13.1.2', commit='106b8f783cafabd699e53beec3a4dd8aee45234b', submodules=True)
version('13.1.1', commit='a17361a78aceab947ca51aa1ecd3391beaa3fcb2', submodules=True)
version('13.1.0', commit='4aca45370738e48623e61e38b26d981d3e20be76', submodules=True)
diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py
index 6760125e00..bea77acf20 100644
--- a/var/spack/repos/builtin/packages/gdal/package.py
+++ b/var/spack/repos/builtin/packages/gdal/package.py
@@ -24,36 +24,36 @@ class Gdal(AutotoolsPackage):
maintainers = ['adamjstewart']
- version('3.3.3', sha256='1e8fc8b19c77238c7f4c27857d04857b65d8b7e8050d3aac256d70fa48a21e76')
- version('3.3.2', sha256='630e34141cf398c3078d7d8f08bb44e804c65bbf09807b3610dcbfbc37115cc3')
- version('3.3.1', sha256='48ab00b77d49f08cf66c60ccce55abb6455c3079f545e60c90ee7ce857bccb70')
- version('3.3.0', sha256='190c8f4b56afc767f43836b2a5cd53cc52ee7fdc25eb78c6079c5a244e28efa7')
- version('3.2.3', sha256='d9ec8458fe97fd02bf36379e7f63eaafce1005eeb60e329ed25bb2d2a17a796f')
- version('3.2.2', sha256='a7e1e414e5c405af48982bf4724a3da64a05770254f2ce8affb5f58a7604ca57')
- version('3.2.1', sha256='6c588b58fcb63ff3f288eb9f02d76791c0955ba9210d98c3abd879c770ae28ea')
- version('3.2.0', sha256='b051f852600ffdf07e337a7f15673da23f9201a9dbb482bd513756a3e5a196a6')
- version('3.1.4', sha256='7b82486f71c71cec61f9b237116212ce18ef6b90f068cbbf9f7de4fc50b576a8')
- version('3.1.3', sha256='161cf55371a143826f1d76ce566db1f0a666496eeb4371aed78b1642f219d51d')
- version('3.1.2', sha256='767c8d0dfa20ba3283de05d23a1d1c03a7e805d0ce2936beaff0bb7d11450641')
- version('3.1.1', sha256='97154a606339a6c1d87c80fb354d7456fe49828b2ef9a3bc9ed91771a03d2a04')
- version('3.1.0', sha256='e754a22242ccbec731aacdb2333b567d4c95b9b02d3ba1ea12f70508d244fcda')
- version('3.0.4', sha256='5569a4daa1abcbba47a9d535172fc335194d9214fdb96cd0f139bb57329ae277')
- version('3.0.3', sha256='e20add5802265159366f197a8bb354899e1693eab8dbba2208de14a457566109')
- version('3.0.2', sha256='c3765371ce391715c8f28bd6defbc70b57aa43341f6e94605f04fe3c92468983')
- version('3.0.1', sha256='45b4ae25dbd87282d589eca76481c426f72132d7a599556470d5c38263b09266')
- version('3.0.0', sha256='ad316fa052d94d9606e90b20a514b92b2dd64e3142dfdbd8f10981a5fcd5c43e')
- version('2.4.4', sha256='a383bd3cf555d6e1169666b01b5b3025b2722ed39e834f1b65090f604405dcd8')
- version('2.4.3', sha256='d52dc3e0cff3af3e898d887c4151442989f416e839948e73f0994f0224bbff60')
- version('2.4.2', sha256='dcc132e469c5eb76fa4aaff238d32e45a5d947dc5b6c801a123b70045b618e0c')
- version('2.4.1', sha256='fd51b4900b2fc49b98d8714f55fc8a78ebfd07218357f93fb796791115a5a1ad')
- version('2.4.0', sha256='c3791dcc6d37e59f6efa86e2df2a55a4485237b0a48e330ae08949f0cdf00f27')
- version('2.3.3', sha256='c3635e41766a648f945d235b922e3c5306e26a2ee5bbd730d2181e242f5f46fe')
- version('2.3.2', sha256='3f6d78fe8807d1d6afb7bed27394f19467840a82bc36d65e66316fa0aa9d32a4')
- version('2.3.1', sha256='9c4625c45a3ee7e49a604ef221778983dd9fd8104922a87f20b99d9bedb7725a')
- version('2.3.0', sha256='6f75e49aa30de140525ccb58688667efe3a2d770576feb7fbc91023b7f552aa2')
- version('2.1.2', sha256='b597f36bd29a2b4368998ddd32b28c8cdf3c8192237a81b99af83cc17d7fa374')
- version('2.0.2', sha256='90f838853cc1c07e55893483faa7e923e4b4b1659c6bc9df3538366030a7e622')
- version('1.11.5', sha256='d4fdc3e987b9926545f0a514b4328cd733f2208442f8d03bde630fe1f7eff042', deprecated=True)
+ version('3.4.0', sha256='ac7bd2bb9436f3fc38bc7309704672980f82d64b4d57627d27849259b8f71d5c')
+ version('3.3.3', sha256='1e8fc8b19c77238c7f4c27857d04857b65d8b7e8050d3aac256d70fa48a21e76')
+ version('3.3.2', sha256='630e34141cf398c3078d7d8f08bb44e804c65bbf09807b3610dcbfbc37115cc3')
+ version('3.3.1', sha256='48ab00b77d49f08cf66c60ccce55abb6455c3079f545e60c90ee7ce857bccb70')
+ version('3.3.0', sha256='190c8f4b56afc767f43836b2a5cd53cc52ee7fdc25eb78c6079c5a244e28efa7')
+ version('3.2.3', sha256='d9ec8458fe97fd02bf36379e7f63eaafce1005eeb60e329ed25bb2d2a17a796f')
+ version('3.2.2', sha256='a7e1e414e5c405af48982bf4724a3da64a05770254f2ce8affb5f58a7604ca57')
+ version('3.2.1', sha256='6c588b58fcb63ff3f288eb9f02d76791c0955ba9210d98c3abd879c770ae28ea')
+ version('3.2.0', sha256='b051f852600ffdf07e337a7f15673da23f9201a9dbb482bd513756a3e5a196a6')
+ version('3.1.4', sha256='7b82486f71c71cec61f9b237116212ce18ef6b90f068cbbf9f7de4fc50b576a8')
+ version('3.1.3', sha256='161cf55371a143826f1d76ce566db1f0a666496eeb4371aed78b1642f219d51d')
+ version('3.1.2', sha256='767c8d0dfa20ba3283de05d23a1d1c03a7e805d0ce2936beaff0bb7d11450641')
+ version('3.1.1', sha256='97154a606339a6c1d87c80fb354d7456fe49828b2ef9a3bc9ed91771a03d2a04')
+ version('3.1.0', sha256='e754a22242ccbec731aacdb2333b567d4c95b9b02d3ba1ea12f70508d244fcda')
+ version('3.0.4', sha256='5569a4daa1abcbba47a9d535172fc335194d9214fdb96cd0f139bb57329ae277')
+ version('3.0.3', sha256='e20add5802265159366f197a8bb354899e1693eab8dbba2208de14a457566109')
+ version('3.0.2', sha256='c3765371ce391715c8f28bd6defbc70b57aa43341f6e94605f04fe3c92468983')
+ version('3.0.1', sha256='45b4ae25dbd87282d589eca76481c426f72132d7a599556470d5c38263b09266')
+ version('3.0.0', sha256='ad316fa052d94d9606e90b20a514b92b2dd64e3142dfdbd8f10981a5fcd5c43e')
+ version('2.4.4', sha256='a383bd3cf555d6e1169666b01b5b3025b2722ed39e834f1b65090f604405dcd8')
+ version('2.4.3', sha256='d52dc3e0cff3af3e898d887c4151442989f416e839948e73f0994f0224bbff60')
+ version('2.4.2', sha256='dcc132e469c5eb76fa4aaff238d32e45a5d947dc5b6c801a123b70045b618e0c')
+ version('2.4.1', sha256='fd51b4900b2fc49b98d8714f55fc8a78ebfd07218357f93fb796791115a5a1ad')
+ version('2.4.0', sha256='c3791dcc6d37e59f6efa86e2df2a55a4485237b0a48e330ae08949f0cdf00f27')
+ version('2.3.3', sha256='c3635e41766a648f945d235b922e3c5306e26a2ee5bbd730d2181e242f5f46fe')
+ version('2.3.2', sha256='3f6d78fe8807d1d6afb7bed27394f19467840a82bc36d65e66316fa0aa9d32a4')
+ version('2.3.1', sha256='9c4625c45a3ee7e49a604ef221778983dd9fd8104922a87f20b99d9bedb7725a')
+ version('2.3.0', sha256='6f75e49aa30de140525ccb58688667efe3a2d770576feb7fbc91023b7f552aa2')
+ version('2.1.2', sha256='b597f36bd29a2b4368998ddd32b28c8cdf3c8192237a81b99af83cc17d7fa374')
+ version('2.0.2', sha256='90f838853cc1c07e55893483faa7e923e4b4b1659c6bc9df3538366030a7e622')
variant('libtool', default=True, description='Use libtool to build the library')
variant('libz', default=True, description='Include libz support')
@@ -155,7 +155,8 @@ class Gdal(AutotoolsPackage):
depends_on('python@2.0:', type=('build', 'link', 'run'), when='@3.2:+python')
depends_on('python', type=('build', 'link', 'run'), when='+python')
# swig/python/setup.py
- depends_on('py-setuptools', type='build', when='+python')
+ depends_on('py-setuptools@:57', type='build', when='@:3.0+python')
+ depends_on('py-setuptools', type='build', when='@3.1:+python')
depends_on('py-numpy@1.0.0:', type=('build', 'run'), when='+python')
depends_on('java@7:', type=('build', 'link', 'run'), when='@3.2:+java')
depends_on('java@6:', type=('build', 'link', 'run'), when='@2.4:+java')
@@ -177,7 +178,7 @@ class Gdal(AutotoolsPackage):
conflicts('%xl_r@:13.0', msg=msg)
conflicts('+mdb', when='~java', msg='MDB driver requires Java')
-
+ conflicts('+mdb', when='@3.5:', msg='MDB driver removed in GDAL 3.5, use ODBC instead')
conflicts('+jasper', when='@3.5:', msg='JPEG2000 driver removed in GDAL 3.5')
conflicts('+perl', when='@3.5:', msg='Perl bindings removed in GDAL 3.5')
diff --git a/var/spack/repos/builtin/packages/gdk-pixbuf/package.py b/var/spack/repos/builtin/packages/gdk-pixbuf/package.py
index e317d4d706..3015892ee0 100644
--- a/var/spack/repos/builtin/packages/gdk-pixbuf/package.py
+++ b/var/spack/repos/builtin/packages/gdk-pixbuf/package.py
@@ -18,13 +18,16 @@ class GdkPixbuf(Package):
list_url = "https://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/"
list_depth = 1
+ version('2.42.6', sha256='c4a6b75b7ed8f58ca48da830b9fa00ed96d668d3ab4b1f723dcf902f78bde77f')
version('2.42.2', sha256='83c66a1cfd591d7680c144d2922c5955d38b4db336d7cd3ee109f7bcf9afef15')
- version('2.40.0', sha256='1582595099537ca8ff3b99c6804350b4c058bb8ad67411bbaae024ee7cead4e6')
- version('2.38.2', sha256='73fa651ec0d89d73dd3070b129ce2203a66171dfc0bd2caa3570a9c93d2d0781')
- version('2.38.0', sha256='dd50973c7757bcde15de6bcd3a6d462a445efd552604ae6435a0532fbbadae47')
+ # https://nvd.nist.gov/vuln/detail/CVE-2021-20240
+ version('2.40.0', sha256='1582595099537ca8ff3b99c6804350b4c058bb8ad67411bbaae024ee7cead4e6', deprecated=True)
+ version('2.38.2', sha256='73fa651ec0d89d73dd3070b129ce2203a66171dfc0bd2caa3570a9c93d2d0781', deprecated=True)
+ version('2.38.0', sha256='dd50973c7757bcde15de6bcd3a6d462a445efd552604ae6435a0532fbbadae47', deprecated=True)
version('2.31.2', sha256='9e467ed09894c802499fb2399cd9a89ed21c81700ce8f27f970a833efb1e47aa', deprecated=True)
variant('x11', default=False, description="Enable X11 support")
+    variant('tiff', default=False, description="Enable TIFF support (partially broken)")
# Man page creation was getting docbook errors, see issue #18853
variant('man', default=False, description="Enable man page creation")
@@ -43,7 +46,7 @@ class GdkPixbuf(Package):
depends_on('jpeg')
depends_on('libpng')
depends_on('zlib')
- depends_on('libtiff')
+ depends_on('libtiff', when='+tiff')
depends_on('gobject-introspection')
depends_on('libx11', when='+x11')
@@ -73,11 +76,13 @@ class GdkPixbuf(Package):
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
- meson_args = std_meson_args
- meson_args += [
- '-Dx11={0}'.format('+x11' in spec),
- '-Dman={0}'.format('+man' in spec),
- ]
+ meson_args = std_meson_args + ['-Dman={0}'.format('+man' in spec)]
+ # Based on suggestion by luigi-calori and the fixup shown by lee218llnl:
+ # https://github.com/spack/spack/pull/27254#issuecomment-974464174
+ if '+x11' in spec:
+ if self.version >= Version('2.42'):
+ raise InstallError('+x11 is not valid for {0}'.format(self.version))
+ meson_args += ['-Dx11=true']
meson('..', *meson_args)
ninja('-v')
if self.run_tests:
diff --git a/var/spack/repos/builtin/packages/geant4-data/package.py b/var/spack/repos/builtin/packages/geant4-data/package.py
index c3c91f3b32..a40f78339f 100644
--- a/var/spack/repos/builtin/packages/geant4-data/package.py
+++ b/var/spack/repos/builtin/packages/geant4-data/package.py
@@ -18,6 +18,8 @@ class Geant4Data(BundlePackage):
tags = ['hep']
+ version('11.0.0')
+ version('10.7.3')
version('10.7.2')
version('10.7.1')
version('10.7.0')
@@ -36,6 +38,18 @@ class Geant4Data(BundlePackage):
# For clarity, declare deps on a Major-Minor version basis as
# they generally don't change on the patch level
# Can move to declaring on a dataset basis if needed
+ # geant4@11.0.X
+ depends_on("g4ndl@4.6", when='@11.0.0:11.0')
+ depends_on("g4emlow@8.0", when='@11.0.0:11.0')
+ depends_on("g4photonevaporation@5.7", when='@11.0.0:11.0')
+ depends_on("g4radioactivedecay@5.6", when='@11.0.0:11.0')
+ depends_on("g4particlexs@4.0", when='@11.0.0:11.0')
+ depends_on("g4pii@1.3", when='@11.0.0:11.0')
+ depends_on("g4realsurface@2.2", when='@11.0.0:11.0')
+ depends_on("g4saiddata@2.0", when='@11.0.0:11.0')
+ depends_on("g4abla@3.1", when='@11.0.0:11.0')
+ depends_on("g4incl@1.0", when='@11.0.0:11.0')
+
# geant4@10.7.X
depends_on("g4ndl@4.6", when='@10.7.0:10.7')
depends_on("g4emlow@7.13", when='@10.7.0:10.7')
diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py
index d01c78ec4e..a5eb13abab 100644
--- a/var/spack/repos/builtin/packages/geant4/package.py
+++ b/var/spack/repos/builtin/packages/geant4/package.py
@@ -19,6 +19,8 @@ class Geant4(CMakePackage):
maintainers = ['drbenmorgan']
+ version('11.0.0', sha256='dbfc6b5030a36936b46f56a0bede4b647d0160c178a5629d39ce392124e47936')
+ version('10.7.3', sha256='8615d93bd4178d34f31e19d67bc81720af67cdab1c8425af8523858dcddcf65b', preferred=True)
version('10.7.2', sha256='593fc85883a361487b17548ba00553501f66a811b0a79039276bb75ad59528cf')
version('10.7.1', sha256='2aa7cb4b231081e0a35d84c707be8f35e4edc4e97aad2b233943515476955293')
version('10.7.0', sha256='c991a139210c7f194720c900b149405090058c00beb5a0d2fac5c40c42a262d4')
@@ -37,6 +39,8 @@ class Geant4(CMakePackage):
values=_cxxstd_values,
multi=False,
description='Use the specified C++ standard when building.')
+ conflicts('cxxstd=11', when='@11:', msg='geant4@11: only supports cxxstd=17')
+ conflicts('cxxstd=14', when='@11:', msg='geant4@11: only supports cxxstd=17')
variant('threads', default=True, description='Build with multithreading')
variant('vecgeom', default=False, description='Enable vecgeom support')
@@ -45,10 +49,15 @@ class Geant4(CMakePackage):
variant('motif', default=False, description='Optional motif support')
variant('qt', default=False, description='Enable Qt support')
variant('python', default=False, description='Enable Python bindings')
+ variant('tbb', default=False, description='Use TBB as a tasking backend', when='@11:')
+ variant('vtk', default=False, description='Enable VTK support', when='@11:')
- depends_on('cmake@3.5:', type='build')
+ depends_on('cmake@3.16:', type='build', when='@11.0.0:')
depends_on('cmake@3.8:', type='build', when='@10.6.0:')
+ depends_on('cmake@3.5:', type='build')
+ depends_on('geant4-data@11.0.0', when='@11.0.0')
+ depends_on('geant4-data@10.7.3', when='@10.7.3')
depends_on('geant4-data@10.7.2', when='@10.7.2')
depends_on('geant4-data@10.7.1', when='@10.7.1')
depends_on('geant4-data@10.7.0', when='@10.7.0')
@@ -64,6 +73,9 @@ class Geant4(CMakePackage):
depends_on("expat")
depends_on("zlib")
+ depends_on('tbb', when='+tbb')
+ depends_on('vtk@8.2:', when='+vtk')
+
# Python, with boost requirement dealt with in cxxstd section
depends_on('python@3:', when='+python')
extends('python', when='+python')
@@ -72,6 +84,9 @@ class Geant4(CMakePackage):
for std in _cxxstd_values:
# CLHEP version requirements to be reviewed
+ depends_on('clhep@2.4.5.1: cxxstd=' + std,
+ when='@11.0.0: cxxstd=' + std)
+
depends_on('clhep@2.4.4.0: cxxstd=' + std,
when='@10.7.0: cxxstd=' + std)
@@ -79,9 +94,12 @@ class Geant4(CMakePackage):
when='@10.3.3:10.6 cxxstd=' + std)
# Spack only supports Xerces-c 3 and above, so no version req
- depends_on('xerces-c netaccessor=curl cxxstd=' + std, when='cxxstd=' + std)
+ depends_on('xerces-c netaccessor=curl cxxstd=' + std,
+ when='cxxstd=' + std)
# Vecgeom specific versions for each Geant4 version
+ depends_on('vecgeom@1.1.18:1.1 cxxstd=' + std,
+ when='@11.0.0: +vecgeom cxxstd=' + std)
depends_on('vecgeom@1.1.8:1.1 cxxstd=' + std,
when='@10.7.0: +vecgeom cxxstd=' + std)
depends_on('vecgeom@1.1.5 cxxstd=' + std,
@@ -120,7 +138,6 @@ class Geant4(CMakePackage):
# Core options
options = [
- self.define_from_variant('GEANT4_BUILD_CXXSTD', 'cxxstd'),
'-DGEANT4_USE_SYSTEM_CLHEP=ON',
'-DGEANT4_USE_SYSTEM_EXPAT=ON',
'-DGEANT4_USE_SYSTEM_ZLIB=ON',
@@ -129,6 +146,14 @@ class Geant4(CMakePackage):
'-DXERCESC_ROOT_DIR={0}'.format(spec['xerces-c'].prefix)
]
+ # Use the correct C++ standard option for the requested version
+ if spec.version >= Version('11.0'):
+ options.append(
+ self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'))
+ else:
+ options.append(
+ self.define_from_variant('GEANT4_BUILD_CXXSTD', 'cxxstd'))
+
# Don't install the package cache file as Spack will set
# up CMAKE_PREFIX_PATH etc for the dependencies
if spec.version >= Version('10.6'):
@@ -137,6 +162,8 @@ class Geant4(CMakePackage):
# Multithreading
options.append(self.define_from_variant('GEANT4_BUILD_MULTITHREADED',
'threads'))
+ options.append(self.define_from_variant('GEANT4_USE_TBB', 'tbb'))
+
if '+threads' in spec:
# Locked at global-dynamic to allow use cases that load the
# geant4 libs at application runtime
@@ -170,6 +197,8 @@ class Geant4(CMakePackage):
'-DQT_QMAKE_EXECUTABLE=%s' %
spec['qt'].prefix.bin.qmake)
+ options.append(self.define_from_variant('GEANT4_USE_VTK', 'vtk'))
+
# Python
if spec.version > Version('10.6.1'):
options.append(self.define_from_variant('GEANT4_USE_PYTHON',
diff --git a/var/spack/repos/builtin/packages/gfsio/package.py b/var/spack/repos/builtin/packages/gfsio/package.py
index 430f1b1da7..205771d8fc 100644
--- a/var/spack/repos/builtin/packages/gfsio/package.py
+++ b/var/spack/repos/builtin/packages/gfsio/package.py
@@ -18,3 +18,10 @@ class Gfsio(CMakePackage):
maintainers = ['t-brown', 'kgerheiser', 'Hang-Lei-NOAA', 'edwardhartnett']
version('1.4.1', sha256='eab106302f520600decc4f9665d7c6a55e7b4901fab6d9ef40f29702b89b69b1')
+
+ def setup_run_environment(self, env):
+ lib = find_libraries('libgfsio', root=self.prefix, shared=False, recursive=True)
+ # Only one library version, but still need to set _4 to make NCO happy
+ for suffix in ('4', ''):
+ env.set('GFSIO_LIB' + suffix, lib[0])
+ env.set('GFSIO_INC' + suffix, join_path(self.prefix, 'include'))
diff --git a/var/spack/repos/builtin/packages/ginkgo/1.4.0_dpcpp_use_old_standard.patch b/var/spack/repos/builtin/packages/ginkgo/1.4.0_dpcpp_use_old_standard.patch
new file mode 100644
index 0000000000..54ed07c22d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ginkgo/1.4.0_dpcpp_use_old_standard.patch
@@ -0,0 +1,70 @@
+commit 83164570f0d3511d114114bcc2b02ad23b753ed0
+Author: Yuhsiang M. Tsai <yhmtsai@gmail.com>
+Date: Wed Oct 6 16:33:16 2021 +0200
+
+ syclstd 1.2.1 in new release to propagate subgroup
+ remove 64 subgroup in dense to avoid conj_trans issue on cpu temporarily
+
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index ceb269b1cb..b47388a596 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -67,7 +67,8 @@ if(MSVC)
+ elseif(GINKGO_BUILD_DPCPP OR CMAKE_CXX_COMPILER MATCHES "dpcpp")
+ # For now always use `-ffp-model=precise` with DPC++. This can be removed when
+ # the floating point issues are fixed.
+- set(GINKGO_COMPILER_FLAGS "-Wpedantic;-ffp-model=precise" CACHE STRING
++ # -sycl-std=1.2.1 (or -sycl-std=2017) is temporary workaround after 2021.4 to propagate subgroup setting correctly
++ set(GINKGO_COMPILER_FLAGS "-Wpedantic;-ffp-model=precise;-sycl-std=1.2.1" CACHE STRING
+ "Set the required CXX compiler flags, mainly used for warnings. Current default is `-Wpedantic;-ffp-model=precise`")
+ else()
+ set(GINKGO_COMPILER_FLAGS "-Wpedantic" CACHE STRING
+diff --git a/cmake/create_test.cmake b/cmake/create_test.cmake
+index 9d22406f9a..dcc452b293 100644
+--- a/cmake/create_test.cmake
++++ b/cmake/create_test.cmake
+@@ -40,6 +40,7 @@ function(ginkgo_create_dpcpp_test test_name)
+ add_executable(${test_target_name} ${test_name}.dp.cpp)
+ target_compile_features(${test_target_name} PUBLIC cxx_std_17)
+ target_compile_options(${test_target_name} PRIVATE "${GINKGO_DPCPP_FLAGS}")
++ target_compile_options(${test_target_name} PRIVATE "${GINKGO_COMPILER_FLAGS}")
+ target_link_options(${test_target_name} PRIVATE -fsycl-device-code-split=per_kernel)
+ ginkgo_set_test_target_properties(${test_name} ${test_target_name})
+ # Note: MKL_ENV is empty on linux. Maybe need to apply MKL_ENV to all test.
+diff --git a/dpcpp/CMakeLists.txt b/dpcpp/CMakeLists.txt
+index fee9ec3639..ce71fd5d3c 100644
+--- a/dpcpp/CMakeLists.txt
++++ b/dpcpp/CMakeLists.txt
+@@ -68,6 +68,7 @@ target_compile_definitions(ginkgo_dpcpp PRIVATE GKO_COMPILING_DPCPP)
+
+ set(GINKGO_DPCPP_FLAGS ${GINKGO_DPCPP_FLAGS} PARENT_SCOPE)
+ target_compile_options(ginkgo_dpcpp PRIVATE "${GINKGO_DPCPP_FLAGS}")
++target_compile_options(ginkgo_dpcpp PRIVATE "${GINKGO_COMPILER_FLAGS}")
+ # Note: add MKL as PRIVATE not PUBLIC (MKL example shows) to avoid propagating
+ # find_package(MKL) everywhere when linking ginkgo (see the MKL example
+ # https://software.intel.com/content/www/us/en/develop/documentation/onemkl-windows-developer-guide/top/getting-started/cmake-config-for-onemkl.html)
+diff --git a/dpcpp/matrix/dense_kernels.dp.cpp b/dpcpp/matrix/dense_kernels.dp.cpp
+index 0c89530d1d..9a86ab9cd1 100644
+--- a/dpcpp/matrix/dense_kernels.dp.cpp
++++ b/dpcpp/matrix/dense_kernels.dp.cpp
+@@ -69,14 +69,14 @@ namespace dpcpp {
+ namespace dense {
+
+
++// Disable the 64 subgroup. CPU supports 64 now, but conj_transpose will
++// lead CL_OUT_OF_RESOURCES. TODO: investigate this issue.
+ using KCFG_1D = ConfigSet<11, 7>;
+ constexpr auto kcfg_1d_list =
+- syn::value_list<std::uint32_t, KCFG_1D::encode(512, 64),
+- KCFG_1D::encode(512, 32), KCFG_1D::encode(512, 16),
+- KCFG_1D::encode(256, 32), KCFG_1D::encode(256, 16),
+- KCFG_1D::encode(256, 8)>();
+-constexpr auto subgroup_list =
+- syn::value_list<std::uint32_t, 64, 32, 16, 8, 4>();
++ syn::value_list<std::uint32_t, KCFG_1D::encode(512, 32),
++ KCFG_1D::encode(512, 16), KCFG_1D::encode(256, 32),
++ KCFG_1D::encode(256, 16), KCFG_1D::encode(256, 8)>();
++constexpr auto subgroup_list = syn::value_list<std::uint32_t, 32, 16, 8, 4>();
+ constexpr auto kcfg_1d_array = syn::as_array(kcfg_1d_list);
+ constexpr int default_block_size = 256;
+
diff --git a/var/spack/repos/builtin/packages/ginkgo/package.py b/var/spack/repos/builtin/packages/ginkgo/package.py
index 4518f3cce1..694ae2702e 100644
--- a/var/spack/repos/builtin/packages/ginkgo/package.py
+++ b/var/spack/repos/builtin/packages/ginkgo/package.py
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
import sys
from spack import *
@@ -31,6 +32,7 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage):
variant('shared', default=True, description='Build shared libraries')
variant('full_optimizations', default=False, description='Compile with all optimizations')
variant('openmp', default=sys.platform != 'darwin', description='Build with OpenMP')
+ variant('oneapi', default=False, description='Build with oneAPI support')
variant('develtools', default=False, description='Compile with develtools enabled')
variant('hwloc', default=False, description='Enable HWLOC support')
variant('build_type', default='Release',
@@ -49,9 +51,13 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage):
depends_on('googletest', type="test")
depends_on('numactl', type="test", when="+hwloc")
+ depends_on('intel-oneapi-mkl', when="+oneapi")
+ depends_on('intel-oneapi-dpl', when="+oneapi")
+
conflicts('%gcc@:5.2.9')
conflicts("+rocm", when="@:1.1.1")
conflicts("+cuda", when="+rocm")
+ conflicts("+openmp", when="+oneapi")
# ROCm 4.1.0 breaks platform settings which breaks Ginkgo's HIP support.
conflicts("^hip@4.1.0:", when="@:1.3.0")
@@ -63,18 +69,39 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage):
patch('1.4.0_skip_invalid_smoke_tests.patch', when='@master')
patch('1.4.0_skip_invalid_smoke_tests.patch', when='@1.4.0')
+ # Newer DPC++ compilers use the updated SYCL 2020 standard which change
+ # kernel attribute propagation rules. This doesn't work well with the
+ # initial Ginkgo oneAPI support.
+ patch('1.4.0_dpcpp_use_old_standard.patch', when='+oneapi @master')
+ patch('1.4.0_dpcpp_use_old_standard.patch', when='+oneapi @1.4.0')
+
+ def setup_build_environment(self, env):
+ spec = self.spec
+ if '+oneapi' in spec:
+ env.set('MKLROOT',
+ join_path(spec['intel-oneapi-mkl'].prefix,
+ 'mkl', 'latest'))
+ env.set('DPL_ROOT',
+ join_path(spec['intel-oneapi-dpl'].prefix,
+ 'dpl', 'latest'))
+
def cmake_args(self):
# Check that the have the correct C++ standard is available
if self.spec.satisfies('@:1.2.0'):
try:
self.compiler.cxx11_flag
except UnsupportedCompilerFlag:
- InstallError('Ginkgo requires a C++11-compliant C++ compiler')
+ raise InstallError('Ginkgo requires a C++11-compliant C++ compiler')
else:
try:
self.compiler.cxx14_flag
except UnsupportedCompilerFlag:
- InstallError('Ginkgo requires a C++14-compliant C++ compiler')
+ raise InstallError('Ginkgo requires a C++14-compliant C++ compiler')
+
+ cxx_is_dpcpp = os.path.basename(self.compiler.cxx) == "dpcpp"
+ if self.spec.satisfies('+oneapi') and not cxx_is_dpcpp:
+            raise InstallError("Ginkgo's oneAPI backend requires the " +
+                               "DPC++ compiler as main CXX compiler.")
spec = self.spec
from_variant = self.define_from_variant
@@ -85,6 +112,7 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage):
from_variant('BUILD_SHARED_LIBS', 'shared'),
from_variant('GINKGO_JACOBI_FULL_OPTIMIZATIONS', 'full_optimizations'),
from_variant('GINKGO_BUILD_HWLOC', 'hwloc'),
+ from_variant('GINKGO_BUILD_DPCPP', 'oneapi'),
from_variant('GINKGO_DEVEL_TOOLS', 'develtools'),
# As we are not exposing benchmarks, examples, tests nor doc
# as part of the installation, disable building them altogether.
diff --git a/var/spack/repos/builtin/packages/gl2ps/package.py b/var/spack/repos/builtin/packages/gl2ps/package.py
index cd8005eb4b..5a5f0f390c 100644
--- a/var/spack/repos/builtin/packages/gl2ps/package.py
+++ b/var/spack/repos/builtin/packages/gl2ps/package.py
@@ -13,6 +13,7 @@ class Gl2ps(CMakePackage):
homepage = "https://www.geuz.org/gl2ps/"
url = "https://geuz.org/gl2ps/src/gl2ps-1.3.9.tgz"
+ version('1.4.2', sha256='8d1c00c1018f96b4b97655482e57dcb0ce42ae2f1d349cd6d4191e7848d9ffe9')
version('1.4.0', sha256='03cb5e6dfcd87183f3b9ba3b22f04cd155096af81e52988cc37d8d8efe6cf1e2')
version('1.3.9', sha256='8a680bff120df8bcd78afac276cdc38041fed617f2721bade01213362bcc3640')
diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py
index 5180973e26..6707c49e1d 100644
--- a/var/spack/repos/builtin/packages/glib/package.py
+++ b/var/spack/repos/builtin/packages/glib/package.py
@@ -23,6 +23,7 @@ class Glib(Package):
maintainers = ['michaelkuhn']
+ version('2.70.2', sha256='0551459c85cd3da3d58ddc9016fd28be5af503f5e1615a71ba5b512ac945806f')
version('2.70.0', sha256='200d7df811c5ba634afbf109f14bb40ba7fde670e89389885da14e27c0840742')
version('2.68.4', sha256='62fd061d08a75492617e625a73e2c05e259f831acbb8e1f8b9c81f23f7993a3b')
version('2.68.3', sha256='e7e1a3c20c026109c45c9ec4a31d8dcebc22e86c69486993e565817d64be3138')
diff --git a/var/spack/repos/builtin/packages/go/package.py b/var/spack/repos/builtin/packages/go/package.py
index 1ed6a1a9b1..008c2be316 100644
--- a/var/spack/repos/builtin/packages/go/package.py
+++ b/var/spack/repos/builtin/packages/go/package.py
@@ -43,9 +43,11 @@ class Go(Package):
maintainers = ['alecbcs']
+ version('1.17.3', sha256='705c64251e5b25d5d55ede1039c6aa22bea40a7a931d14c370339853643c3df0')
version('1.17.2', sha256='2255eb3e4e824dd7d5fcdc2e7f84534371c186312e546fb1086a34c17752f431')
version('1.17.1', sha256='49dc08339770acd5613312db8c141eaf61779995577b89d93b541ef83067e5b1')
version('1.17', sha256='3a70e5055509f347c0fb831ca07a2bf3b531068f349b14a3c652e9b5b67beb5d')
+ version('1.16.10', sha256='a905472011585e403d00d2a41de7ced29b8884309d73482a307f689fd0f320b5')
version('1.16.9', sha256='0a1cc7fd7bd20448f71ebed64d846138850d5099b18cf5cc10a4fc45160d8c3d')
version('1.16.6', sha256='a3a5d4bc401b51db065e4f93b523347a4d343ae0c0b08a65c3423b05a138037d')
version('1.16.5', sha256='7bfa7e5908c7cc9e75da5ddf3066d7cbcf3fd9fa51945851325eebc17f50ba80')
diff --git a/var/spack/repos/builtin/packages/goshimmer/package.py b/var/spack/repos/builtin/packages/goshimmer/package.py
new file mode 100644
index 0000000000..e440321f57
--- /dev/null
+++ b/var/spack/repos/builtin/packages/goshimmer/package.py
@@ -0,0 +1,40 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Goshimmer(Package):
+ """GoShimmer is a prototype node for exploring the implementation of IOTA 2.0"""
+ homepage = 'https://github.com/iotaledger/goshimmer'
+ url = 'https://github.com/iotaledger/goshimmer/archive/refs/tags/v0.8.0.tar.gz'
+
+ maintainers = ['bernhardkaindl']
+
+ version('0.8.0', 'ec515deebf0dd35ff76cd98addae9cfcbf4758ab9eb72eb0f6ff4654f2658980')
+
+ depends_on('go@1.16:', type='build')
+ depends_on('snappy')
+ depends_on('rocksdb')
+ depends_on('zstd')
+ depends_on('zlib')
+ depends_on('lz4')
+
+ @property
+ def snapbin(self):
+ return join_path(self.prefix.bin, 'snapshot.bin')
+
+ def patch(self):
+ for file in ['tools/genesis-snapshot/main', 'plugins/messagelayer/parameters']:
+ filter_file('./snapshot.bin', self.snapbin, file + '.go')
+
+ def install(self, spec, prefix):
+ which('go')('build', '-modcacherw', '-tags', 'rocksdb,netgo')
+ mkdir(prefix.bin)
+ install('config.default.json', prefix.bin)
+ install('goshimmer', prefix.bin)
+ which('wget')('-O', self.snapbin,
+ 'https://dbfiles-goshimmer.s3.eu-central-1.amazonaws.com/snapshots/nectar/snapshot-latest.bin')
+ remove_linked_tree(prefix.pkg)
diff --git a/var/spack/repos/builtin/packages/gpgme/package.py b/var/spack/repos/builtin/packages/gpgme/package.py
index 7d0695cc28..8522c04552 100644
--- a/var/spack/repos/builtin/packages/gpgme/package.py
+++ b/var/spack/repos/builtin/packages/gpgme/package.py
@@ -11,12 +11,20 @@ class Gpgme(AutotoolsPackage):
functions from programming languages."""
homepage = "https://www.gnupg.org/software/gpgme/index.html"
- url = "https://www.gnupg.org/ftp/gcrypt/gpgme/gpgme-1.12.0.tar.bz2"
+ url = "https://www.gnupg.org/ftp/gcrypt/gpgme/gpgme-1.16.0.tar.bz2"
executables = ['^gpgme-config$']
+ version('1.16.0', sha256='6c8cc4aedb10d5d4c905894ba1d850544619ee765606ac43df7405865de29ed0')
version('1.12.0', sha256='b4dc951c3743a60e2e120a77892e9e864fb936b2e58e7c77e8581f4d050e8cd8')
+ # https://dev.gnupg.org/T5509 - New test t-edit-sign test crashes with GCC 11.1.0
+ patch(
+ 'https://git.gnupg.org/cgi-bin/gitweb.cgi?p=gpgme.git;a=commitdiff_plain;h=81a33ea5e1b86d586b956e893a5b25c4cd41c969;hp=e8e055e682f8994d62012574e1c8d862ca72a35d',
+ sha256='b934e3cb0b3408ad27990d97b594c89801a4748294e2eb5804a455a312821411',
+ when='@1.16.0',
+ )
+
depends_on('gnupg', type='build')
depends_on('libgpg-error', type='build')
depends_on('libassuan', type='build')
@@ -24,3 +32,24 @@ class Gpgme(AutotoolsPackage):
@classmethod
def determine_version(cls, exe):
return Executable(exe)('--version', output=str, error=str).rstrip()
+
+ def configure_args(self):
+ """Fix the build when incompatible Qt libraries are installed on the host"""
+ return ['--enable-languages=cpp']
+
+ def setup_build_environment(self, env):
+ """Build tests create a public keyring in ~/.gnupg if $HOME is not redirected"""
+ if self.run_tests:
+ env.set('HOME', self.build_directory)
+ env.prepend_path('LD_LIBRARY_PATH', self.spec['libgpg-error'].prefix.lib)
+
+ @property
+ def make_tests(self):
+ """Use the Makefile's tests variable to control if the build tests shall run"""
+ return 'tests=tests' if self.run_tests else 'tests='
+
+ def build(self, spec, prefix):
+ make(self.make_tests)
+
+ def install(self, spec, prefix):
+ make(self.make_tests, 'install')
diff --git a/var/spack/repos/builtin/packages/gpi-2/package.py b/var/spack/repos/builtin/packages/gpi-2/package.py
index 20ebe86a32..353a46f5e7 100644
--- a/var/spack/repos/builtin/packages/gpi-2/package.py
+++ b/var/spack/repos/builtin/packages/gpi-2/package.py
@@ -18,7 +18,7 @@ class Gpi2(AutotoolsPackage):
url = 'https://github.com/cc-hpc-itwm/GPI-2/archive/refs/tags/v1.5.1.tar.gz'
git = 'https://github.com/cc-hpc-itwm/GPI-2.git'
- maintainers = ['robert-mijakovic', 'acastanedam']
+ maintainers = ['robert-mijakovic', 'acastanedam', 'mzeyen1985']
version('develop', branch='next')
version('master', branch='master')
@@ -26,6 +26,7 @@ class Gpi2(AutotoolsPackage):
version('1.5.1', sha256='4dac7e9152694d2ec4aefd982a52ecc064a8cb8f2c9eab0425428127c3719e2e')
version('1.5.0', sha256='ee299ac1c08c38c9e7871d4af745f1075570ddbb708bb62d82257244585e5183')
version('1.4.0', sha256='3b8ffb45346b2fe56aaa7ba15a515e62f9dff45a28e6a014248e20094bbe50a1')
+ version('1.3.3', sha256='923a848009e7dcd9d26c317ede68b50289b2a9297eb10a75dcc34a4d49f3cdcc')
version('1.3.2', sha256='83dbfb2e4bed28ef4e2ae430d30505874b4b50252e2f31dc422b3bc191a87ab0')
version('1.3.1', sha256='414fa352e7b478442e6f5d0b51ff00deeb4fc705de805676c0e68829f3f30967')
version('1.3.0', sha256='ffaa5c6abfbf79aec6389ab7caaa2c8a91bce24fd046d9741418ff815cd445d2')
@@ -67,29 +68,29 @@ class Gpi2(AutotoolsPackage):
depends_on('slurm', when='schedulers=slurm')
conflicts('%gcc@10:', when='@:1.3.2', msg='gcc>10 is not supported')
- conflicts('schedulers=slurm', when='@:1.3.2', msg='Slurm is not supported')
+ conflicts('schedulers=slurm', when='@:1.3.3', msg='Slurm is not supported')
def set_specific_cflags(self, spec):
- if spec.satisfies('@:1.4.0%gcc@10.1.0:'):
+ if spec.satisfies('@1.4.0%gcc@10.1.0:'):
environ['CFLAGS'] = '-fcommon'
# GPI-2 without autotools
- @when('@:1.3.2')
+ @when('@:1.3.3')
def autoreconf(self, spec, prefix):
touch = which('touch')
touch('configure')
pass
- @when('@:1.3.2')
+ @when('@:1.3.3')
def configure(self, spec, prefix):
pass
- @when('@:1.3.2')
+ @when('@:1.3.3')
def build(self, spec, prefix):
self.old_install(spec, prefix)
pass
- @when('@:1.3.2')
+ @when('@:1.3.3')
def old_install(self, spec, prefix):
spec = self.spec
@@ -113,7 +114,7 @@ class Gpi2(AutotoolsPackage):
install = which('./install.sh')
install(*config_args)
- @when('@:1.3.2')
+ @when('@:1.3.3')
def install(self, spec, prefix):
pass
diff --git a/var/spack/repos/builtin/packages/gptune/package.py b/var/spack/repos/builtin/packages/gptune/package.py
new file mode 100644
index 0000000000..e7d952efad
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gptune/package.py
@@ -0,0 +1,173 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack import *
+
+
+class Gptune(CMakePackage):
+ """GPTune is an autotuning framework that relies on multitask and transfer
+ learnings to help solve the underlying black-box optimization problem using
+ Bayesian optimization methodologies."""
+
+ homepage = "https://gptune.lbl.gov/"
+ git = "https://github.com/gptune/GPTune.git"
+ maintainers = ['liuyangzhuan']
+
+ version('master', branch='master')
+
+ variant('app', default=False, description='Build all HPC application examples')
+
+ depends_on('mpi', type=('build', 'link', 'run'))
+ depends_on('cmake@3.3:', type='build')
+ depends_on('jq', type='run')
+ depends_on('blas', type='link')
+ depends_on('lapack', type='link')
+ depends_on('scalapack', type='link')
+ depends_on('py-setuptools', type='build')
+ depends_on('py-ipyparallel', type=('build', 'run'))
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-pandas', type=('build', 'run'))
+ depends_on('py-joblib', type=('build', 'run'))
+ depends_on('py-scikit-learn', type=('build', 'run'))
+ depends_on('py-matplotlib', type=('build', 'run'))
+ depends_on('py-pyyaml', type=('build', 'run'))
+ depends_on('py-scikit-optimize@master+gptune', type=('build', 'run'))
+ depends_on('py-gpy', type=('build', 'run'))
+ depends_on('py-lhsmdu', type=('build', 'run'))
+ depends_on('py-hpbandster', type=('build', 'run'))
+ depends_on('py-opentuner', type=('build', 'run'))
+ depends_on('py-ytopt-autotune@1.1.0', type=('build', 'run'))
+ depends_on('py-filelock', type=('build', 'run'))
+ depends_on('py-requests', type=('build', 'run'))
+ depends_on('py-cython', type=('build', 'run'))
+ depends_on('py-pyaml', type=('build', 'run'))
+ depends_on('py-mpi4py@3.0.3:', type=('build', 'run'))
+ depends_on('pygmo', type=('build', 'run'))
+ depends_on('openturns', type=('build', 'run'))
+
+ depends_on('superlu-dist@develop', when='+app', type=('build', 'run'))
+
+ conflicts('openmpi@:3')
+
+ def cmake_args(self):
+ spec = self.spec
+ fc_flags = []
+ if '%gcc@10:' in spec or self.spec.satisfies('%apple-clang@11:'):
+ fc_flags.append('-fallow-argument-mismatch')
+
+ args = [
+ '-DTPL_BLAS_LIBRARIES=%s' % spec['blas'].libs.joined(";"),
+ '-DTPL_LAPACK_LIBRARIES=%s' % spec['lapack'].libs.joined(";"),
+ '-DTPL_SCALAPACK_LIBRARIES=%s' % spec['scalapack'].
+ libs.joined(";"),
+ '-DCMAKE_Fortran_FLAGS=' + ''.join(fc_flags),
+ '-DBUILD_SHARED_LIBS=ON',
+ ]
+
+ return args
+
+ examples_src_dir = 'examples'
+ src_dir = 'GPTune'
+ nodes = 1
+ cores = 4
+
+ @run_after('install')
+ def cache_test_sources(self):
+ """Copy the example source files after the package is installed to an
+ install test subdirectory for use during `spack test run`."""
+ self.cache_extra_test_sources([self.examples_src_dir, self.src_dir])
+
+ def test(self):
+ spec = self.spec
+ comp_name = self.compiler.name
+ comp_version = str(self.compiler.version).replace('.', ',')
+ test_dir = join_path(self.install_test_root, self.examples_src_dir)
+
+ if '+app' in spec:
+ superludriver = join_path(spec['superlu-dist'].prefix.bin, 'pddrive_spawn')
+ op = ['-r', superludriver, '.']
+ # copy superlu-dist executables to the correct place
+ with working_dir(join_path(test_dir, 'SuperLU_DIST'), create=False):
+ self.run_test('rm', options=['-rf', 'superlu_dist'], work_dir='.')
+ self.run_test('git', options=['clone', 'https://github.com/xiaoyeli/superlu_dist.git'], work_dir='.')
+ self.run_test('mkdir', options=['-p',
+ 'build'], work_dir='./superlu_dist')
+ self.run_test('mkdir', options=['-p', 'EXAMPLE'],
+ work_dir='./superlu_dist/build')
+ self.run_test('cp', options=op, work_dir='./superlu_dist/build/EXAMPLE')
+
+ with working_dir(self.install_test_root, create=False):
+ cdir = join_path(self.prefix, 'gptuneclcm')
+ self.run_test('cp', options=['-r', cdir, '.'], work_dir='.')
+ self.run_test('rm', options=['-rf', 'build'], work_dir='.')
+ self.run_test('mv', options=['gptuneclcm', 'build'], work_dir='.')
+
+ with open('{0}/run_env.sh'.format(self.install_test_root), 'w') as envfile:
+ envfile.write('if [[ $NERSC_HOST = "cori" ]]; then\n')
+ envfile.write(' export machine=cori\n')
+ envfile.write('elif [[ $(uname -s) = "Darwin" ]]; then\n')
+ envfile.write(' export machine=mac\n')
+ envfile.write('elif [[ $(dnsdomainname) = ' +
+ '"summit.olcf.ornl.gov" ]]; then\n')
+ envfile.write(' export machine=summit\n')
+ envfile.write('elif [[ $(cat /etc/os-release | grep "PRETTY_NAME") ==' +
+ ' *"Ubuntu"* || $(cat /etc/os-release | grep' +
+ ' "PRETTY_NAME") == *"Debian"* ]]; then\n')
+ envfile.write(' export machine=unknownlinux\n')
+ envfile.write('fi\n')
+ envfile.write('export GPTUNEROOT=$PWD\n')
+ envfile.write('export MPIRUN={0}\n'.format
+ (which(spec['mpi'].prefix.bin + '/mpirun')))
+ envfile.write('export proc=$(spack arch)\n')
+ envfile.write('export mpi={0}\n'.format(spec['mpi'].name))
+ envfile.write('export compiler={0}\n'.format(comp_name))
+ envfile.write('export nodes={0} \n'.format(self.nodes))
+ envfile.write('export cores={0} \n'.format(self.cores))
+ envfile.write('export ModuleEnv=$machine-$proc-$mpi-$compiler \n')
+ envfile.write('software_json=$(echo ",\\\"software_configuration\\\":' +
+ '{\\\"' + spec['blas'].name +
+ '\\\":{\\\"version_split\\\":' +
+ ' [' + str(spec['blas'].versions).replace('.', ',') +
+ ']},\\\"' + spec['mpi'].name +
+ '\\\":{\\\"version_split\\\": [' +
+ str(spec['mpi'].versions).replace('.', ',') + ']},\\\"' +
+ spec['scalapack'].name +
+ '\\\":{\\\"version_split\\\": [' +
+ str(spec['scalapack'].versions).replace('.', ',') +
+ ']},\\\"' +
+ str(comp_name) + '\\\":{\\\"version_split\\\": [' +
+ str(comp_version) + ']}}") \n')
+ envfile.write('loadable_software_json=$(echo ",\\\"loadable_software_' +
+ 'configurations\\\":{\\\"' + spec['blas'].name +
+ '\\\":{\\\"version_split\\\": [' +
+ str(spec['blas'].versions).replace('.', ',') +
+ ']},\\\"' + spec['mpi'].name +
+ '\\\":{\\\"version_split\\\": [' +
+ str(spec['mpi'].versions).replace('.', ',') + ']},\\\"' +
+ spec['scalapack'].name +
+ '\\\":{\\\"version_split\\\": [' +
+ str(spec['scalapack'].versions).replace('.', ',') +
+ ']},\\\"' + str(comp_name) +
+ '\\\":{\\\"version_split\\\": ['
+ + str(comp_version) + ']}}") \n')
+ envfile.write('machine_json=$(echo ",\\\"machine_configuration\\\":' +
+ '{\\\"machine_name\\\":\\\"$machine\\\",\\\"$proc\\\":' +
+ '{\\\"nodes\\\":$nodes,\\\"cores\\\":$cores}}") \n')
+ envfile.write('loadable_machine_json=$(echo ",\\\"loadable_machine_' +
+ 'configurations\\\":{\\\"$machine\\\":{\\\"$proc\\\":' +
+ '{\\\"nodes\\\":$nodes,\\\"cores\\\":$cores}}}") \n')
+
+ if '+app' in spec:
+ apps = ['GPTune-Demo', 'SuperLU_DIST', 'SuperLU_DIST_RCI',
+ 'Scalapack-PDGEQRF', 'Scalapack-PDGEQRF_RCI']
+ else:
+ apps = ['GPTune-Demo', 'Scalapack-PDGEQRF', 'Scalapack-PDGEQRF_RCI']
+
+ for app in apps:
+ with working_dir(join_path(test_dir, app), create=False):
+ # PDGEQRF with GPTune
+ self.run_test('bash', options=['run_examples.sh'], work_dir='.',
+ purpose='gptune smoke test for {0}'.format(app))
diff --git a/var/spack/repos/builtin/packages/gpu-burn/package.py b/var/spack/repos/builtin/packages/gpu-burn/package.py
index 15d2dd5b8f..87a2832842 100644
--- a/var/spack/repos/builtin/packages/gpu-burn/package.py
+++ b/var/spack/repos/builtin/packages/gpu-burn/package.py
@@ -32,9 +32,9 @@ class GpuBurn(MakefilePackage, CudaPackage):
fh.write('drv:\n')
fh.write('\tnvcc {0} -fatbin '
'compare.cu -o compare.ptx\n'.format(archflag))
- fh.write('\tg++ -O3 -c gpu_burn-drv.cpp\n')
- fh.write('\tg++ -o gpu_burn gpu_burn-drv.o -O3 -lcuda '
- '-lcublas -lcudart -o gpu_burn\n')
+ fh.write('\t{0} -O3 -c gpu_burn-drv.cpp\n'.format(spack_cxx))
+ fh.write('\t{0} -o gpu_burn gpu_burn-drv.o -O3 -lcuda '
+ '-lcublas -lcudart -o gpu_burn\n'.format(spack_cxx))
filter_file('compare.ptx',
join_path(prefix.share,
diff --git a/var/spack/repos/builtin/packages/grnboost/package.py b/var/spack/repos/builtin/packages/grnboost/package.py
index 35a802bf40..dff74e5031 100644
--- a/var/spack/repos/builtin/packages/grnboost/package.py
+++ b/var/spack/repos/builtin/packages/grnboost/package.py
@@ -22,7 +22,7 @@ class Grnboost(Package):
depends_on('sbt', type='build')
depends_on('java', type=('build', 'run'))
- depends_on('xgboost+jvm-packages', type='run')
+ depends_on('xgboost', type='run')
depends_on('spark+hadoop', type='run')
def setup_run_environment(self, env):
diff --git a/var/spack/repos/builtin/packages/groff/package.py b/var/spack/repos/builtin/packages/groff/package.py
index ce2cc490bc..1525e11d3e 100644
--- a/var/spack/repos/builtin/packages/groff/package.py
+++ b/var/spack/repos/builtin/packages/groff/package.py
@@ -43,6 +43,7 @@ class Groff(AutotoolsPackage, GNUMirrorPackage):
depends_on('texinfo', type='build', when='@1.22.4:')
# configure complains when there is no uchardet that enhances preconv
depends_on('uchardet', when='@1.22.4:')
+ depends_on('pkgconfig', type='build')
depends_on('libice', when='+x')
depends_on('libxaw', when='+x')
diff --git a/var/spack/repos/builtin/packages/gromacs-chain-coordinate/package.py b/var/spack/repos/builtin/packages/gromacs-chain-coordinate/package.py
index e36660aba4..37be793c1a 100644
--- a/var/spack/repos/builtin/packages/gromacs-chain-coordinate/package.py
+++ b/var/spack/repos/builtin/packages/gromacs-chain-coordinate/package.py
@@ -3,18 +3,10 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-# This is a partial copy of Spack Gromacs package
-# - modified URL and versions
-# - removed Plumed patches
-# - calling original patch and cmake-related procedures to not duplicate them
-# - simplified variants/dependencies because this fork starts at Gromacs 2021
+from spack.pkg.builtin.gromacs import Gromacs
-import os
-from spack.pkg.builtin.gromacs import Gromacs as BuiltinGromacs
-
-
-class GromacsChainCoordinate(CMakePackage):
+class GromacsChainCoordinate(Gromacs):
"""
A modification of GROMACS that implements the "chain coordinate", a reaction
coordinate for pore formation in membranes and stalk formation between membranes.
@@ -26,57 +18,27 @@ class GromacsChainCoordinate(CMakePackage):
maintainers = ['w8jcik']
version('main', branch='main')
+
version('2021.2-0.1', sha256="879fdd04662370a76408b72c9fbc4aff60a6387b459322ac2700d27359d0dd87",
url="https://gitlab.com/cbjh/gromacs-chain-coordinate/-/archive/release-2021.chaincoord-0.1/gromacs-chain-coordinate-release-2021.chaincoord-0.1.tar.bz2",
preferred=True)
- variant('mpi', default=True,
- description='Activate MPI support (disable for Thread-MPI support)')
- variant('shared', default=True,
- description='Enables the build of shared libraries')
- variant(
- 'double', default=False,
- description='Produces a double precision version of the executables')
- variant('cuda', default=False, description='Enable CUDA support')
- variant('opencl', default=False, description='Enable OpenCL support')
- variant('sycl', default=False, description='Enable SYCL support')
- variant('nosuffix', default=False, description='Disable default suffixes')
- variant('build_type', default='RelWithDebInfo',
- description='The build type to build',
- values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel',
- 'Reference', 'RelWithAssert', 'Profile'))
- variant('openmp', default=True,
- description='Enables OpenMP at configure time')
- variant('hwloc', default=True,
- description='Use the hwloc portable hardware locality library')
- variant('lapack', default=False,
- description='Enables an external LAPACK library')
- variant('blas', default=False,
- description='Enables an external BLAS library')
- variant('cycle_subcounters', default=False,
- description='Enables cycle subcounters')
+ conflicts('+plumed')
- depends_on('mpi', when='+mpi')
- depends_on('fftw-api@3')
- depends_on('cmake@3.16.0:3', type='build')
- depends_on('cuda', when='+cuda')
- depends_on('sycl', when='+sycl')
- depends_on('lapack', when='+lapack')
- depends_on('blas', when='+blas')
- depends_on('hwloc', when='+hwloc')
+ def remove_parent_versions(self):
+ """
+ By inheriting GROMACS package we also inherit versions.
+ They are not valid, so we are removing them.
+ """
- filter_compiler_wrappers(
- '*.cmake',
- relative_root=os.path.join('share', 'cmake', 'gromacs_mpi'))
- filter_compiler_wrappers(
- '*.cmake',
- relative_root=os.path.join('share', 'cmake', 'gromacs'))
+ for version_key in Gromacs.versions.keys():
+ if version_key in self.versions:
+ del self.versions[version_key]
- def patch(self):
- BuiltinGromacs.patch(self)
+ def __init__(self, spec):
+ super(GromacsChainCoordinate, self).__init__(spec)
- def cmake_args(self):
- return super(GromacsChainCoordinate, self).cmake_args()
+ self.remove_parent_versions()
def check(self):
"""The default 'test' targets does not compile the test programs"""
diff --git a/var/spack/repos/builtin/packages/gromacs-swaxs/gmxDetectCpu-cmake-3.14.patch b/var/spack/repos/builtin/packages/gromacs-swaxs/gmxDetectCpu-cmake-3.14.patch
new file mode 100644
index 0000000000..534ac4baab
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gromacs-swaxs/gmxDetectCpu-cmake-3.14.patch
@@ -0,0 +1,11 @@
+--- a/cmake/gmxDetectCpu.cmake
++++ b/cmake/gmxDetectCpu.cmake
+@@ -83,7 +83,7 @@ function(gmx_run_cpu_detection TYPE)
+ set(GCC_INLINE_ASM_DEFINE "-DGMX_X86_GCC_INLINE_ASM=0")
+ endif()
+
+- set(_compile_definitions "${GCC_INLINE_ASM_DEFINE} -I${PROJECT_SOURCE_DIR}/src -DGMX_CPUINFO_STANDALONE ${GMX_STDLIB_CXX_FLAGS} -DGMX_TARGET_X86=${GMX_TARGET_X86_VALUE}")
++ set(_compile_definitions ${GCC_INLINE_ASM_DEFINE} -I${PROJECT_SOURCE_DIR}/src -DGMX_CPUINFO_STANDALONE ${GMX_STDLIB_CXX_FLAGS} -DGMX_TARGET_X86=${GMX_TARGET_X86_VALUE})
+ set(LINK_LIBRARIES "${GMX_STDLIB_LIBRARIES}")
+ try_compile(CPU_DETECTION_COMPILED
+ "${PROJECT_BINARY_DIR}"
diff --git a/var/spack/repos/builtin/packages/gromacs-swaxs/gmxDetectSimd-cmake-3.14.patch b/var/spack/repos/builtin/packages/gromacs-swaxs/gmxDetectSimd-cmake-3.14.patch
new file mode 100644
index 0000000000..f30ec78a7a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gromacs-swaxs/gmxDetectSimd-cmake-3.14.patch
@@ -0,0 +1,11 @@
+--- a/cmake/gmxDetectSimd.cmake
++++ b/cmake/gmxDetectSimd.cmake
+@@ -77,7 +77,7 @@ function(gmx_suggest_simd _suggested_simd)
+ else()
+ set(GMX_TARGET_X86_VALUE 0)
+ endif()
+- set(_compile_definitions "${GCC_INLINE_ASM_DEFINE} -I${CMAKE_SOURCE_DIR}/src -DGMX_CPUINFO_STANDALONE ${GMX_STDLIB_CXX_FLAGS} -DGMX_TARGET_X86=${GMX_TARGET_X86_VALUE}")
++ set(_compile_definitions ${GCC_INLINE_ASM_DEFINE} -I${CMAKE_SOURCE_DIR}/src -DGMX_CPUINFO_STANDALONE ${GMX_STDLIB_CXX_FLAGS} -DGMX_TARGET_X86=${GMX_TARGET_X86_VALUE})
+
+ # Prepare a default suggestion
+ set(OUTPUT_SIMD "None")
diff --git a/var/spack/repos/builtin/packages/gromacs-swaxs/package.py b/var/spack/repos/builtin/packages/gromacs-swaxs/package.py
new file mode 100644
index 0000000000..e1d4e0e80a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gromacs-swaxs/package.py
@@ -0,0 +1,67 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.pkg.builtin.gromacs import Gromacs
+
+
+class GromacsSwaxs(Gromacs):
+ """Modified Gromacs for small-angle scattering calculations (SAXS/WAXS/SANS)"""
+
+ homepage = 'https://biophys.uni-saarland.de/swaxs.html'
+ url = 'https://gitlab.com/cbjh/gromacs-swaxs/-/archive/release-2019.swaxs-0.1/gromacs-swaxs-release-2019.swaxs-0.1.tar.bz2'
+ git = 'https://gitlab.com/cbjh/gromacs-swaxs.git'
+ maintainers = ['w8jcik']
+
+ version('2021.4-0.1', sha256='eda1c8a7aae6001ef40480addf9fff9cdccc7e2b80480e36d069f50d6f2be26e',
+ url='https://gitlab.com/cbjh/gromacs-swaxs/-/archive/release-2021.swaxs-0.1/gromacs-swaxs-release-2021.swaxs-0.1.tar.bz2')
+
+ version('2020.6-0.1', sha256='3d8360a3cc9144772786bddaa11e3dbc37d6a466b99f3314bf3928261c2fddcf',
+ url='https://gitlab.com/cbjh/gromacs-swaxs/-/archive/release-2020.swaxs-0.1/gromacs-swaxs-release-2020.swaxs-0.1.tar.bz2')
+
+ version('2019.6-0.2', sha256='a45eeee3983a4443d3a40ea417770f3abd93f43eb80e021fd9d6830e414565cb',
+ url='https://gitlab.com/cbjh/gromacs-swaxs/-/archive/release-2019.swaxs-0.2/gromacs-swaxs-release-2019.swaxs-0.2.tar.bz2')
+
+ version('2019.6-0.1', sha256='91da09eed80646d6a1c500be78891bef22623a19795a9bc89adf9f2ec4f85635',
+ url='https://gitlab.com/cbjh/gromacs-swaxs/-/archive/release-2019.swaxs-0.1/gromacs-swaxs-release-2019.swaxs-0.1.tar.bz2')
+
+ version('2018.8-0.3', sha256='5e94d636fda28e81ff1f3cff2c9f6e7f458bf496f4d1ed7bc10e911bd98b303c',
+ url='https://gitlab.com/cbjh/gromacs-swaxs/-/archive/release-2018.swaxs-0.3/gromacs-swaxs-release-2018.swaxs-0.3.tar.bz2')
+
+ version('2018.8-0.2', sha256='f8bf0d363334a9117a2a8deb690dadaa826b73b57a761949c7846a13b84b5af5',
+ url='https://gitlab.com/cbjh/gromacs-swaxs/-/archive/release-2018.swaxs-0.2/gromacs-swaxs-release-2018.swaxs-0.2.tar.bz2')
+
+ version('2018.8-0.1', sha256='478f45286dfedb8f01c2d5bf0773a391c2de2baf85283ef683e911bc43e24675',
+ url='https://gitlab.com/cbjh/gromacs-swaxs/-/archive/release-2018.swaxs-0.1/gromacs-swaxs-release-2018.swaxs-0.1.tar.bz2')
+
+ version('2016.6-0.1', sha256='11e8ae6b3141f356bae72b595737a1f253b878d313169703ba33a69ded01a04e',
+ url='https://gitlab.com/cbjh/gromacs-swaxs/-/archive/release-2016.swaxs-0.1/gromacs-swaxs-release-2016.swaxs-0.1.tar.bz2')
+
+ version('5.1.5-0.3', sha256='a9e8382eec3cc0d943e1869f13945ea4a971a95a70eb314c1f26a17fa7d03792',
+ url='https://gitlab.com/cbjh/gromacs-swaxs/-/archive/release-5-1.swaxs-0.3/gromacs-swaxs-release-5-1.swaxs-0.3.tar.bz2')
+
+ version('5.0.7-0.5', sha256='7f7f69726472a641a5443f1993a6e1fb8cfa9c74aeaf46e8c5d1db37005ece79',
+ url='https://gitlab.com/cbjh/gromacs-swaxs/-/archive/release-5-0.swaxs-0.5/gromacs-swaxs-release-5-0.swaxs-0.5.tar.bz2')
+
+ version('4.6.7-0.8', sha256='1cfa34fe9ff543b665cd556f3395a9aa67f916110ba70255c97389eafe8315a2',
+ url='https://gitlab.com/cbjh/gromacs-swaxs/-/archive/release-4-6.swaxs-0.8/gromacs-swaxs-release-4-6.swaxs-0.8.tar.bz2')
+
+ conflicts('+plumed')
+ conflicts('+opencl')
+ conflicts('+sycl')
+
+ def remove_parent_versions(self):
+ """
+ By inheriting GROMACS package we also inherit versions.
+ They are not valid, so we are removing them.
+ """
+
+ for version_key in Gromacs.versions.keys():
+ if version_key in self.versions:
+ del self.versions[version_key]
+
+ def __init__(self, spec):
+ super(GromacsSwaxs, self).__init__(spec)
+
+ self.remove_parent_versions()
diff --git a/var/spack/repos/builtin/packages/grpc/package.py b/var/spack/repos/builtin/packages/grpc/package.py
index f7ad7d21f9..074ce4137d 100644
--- a/var/spack/repos/builtin/packages/grpc/package.py
+++ b/var/spack/repos/builtin/packages/grpc/package.py
@@ -55,9 +55,9 @@ class Grpc(CMakePackage):
def cmake_args(self):
args = [
- define_from_variant('BUILD_SHARED_LIBS', 'shared'),
- define_from_variant('gRPC_BUILD_CODEGEN', 'codegen'),
- define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
+ self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
+ self.define_from_variant('gRPC_BUILD_CODEGEN', 'codegen'),
+ self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
'-DgRPC_BUILD_CSHARP_EXT:Bool=OFF',
'-DgRPC_INSTALL:Bool=ON',
# Tell grpc to skip vendoring and look for deps via find_package:
diff --git a/var/spack/repos/builtin/packages/gtk-doc/package.py b/var/spack/repos/builtin/packages/gtk-doc/package.py
index 1cb992b3a5..5cebdafaa6 100644
--- a/var/spack/repos/builtin/packages/gtk-doc/package.py
+++ b/var/spack/repos/builtin/packages/gtk-doc/package.py
@@ -15,8 +15,9 @@ class GtkDoc(AutotoolsPackage):
pdf/man-pages with some extra work."""
homepage = "https://wiki.gnome.org/DocumentationProject/GtkDoc"
- url = 'https://gitlab.gnome.org/GNOME/gtk-doc/-/archive/GTK_DOC_1_32/gtk-doc-GTK_DOC_1_32.tar.gz'
+ url = 'https://gitlab.gnome.org/GNOME/gtk-doc/-/archive/1.33.2/gtk-doc-1.33.2.tar.gz'
+ version('1.33.2', sha256='2d1b0cbd26edfcb54694b2339106a02a81d630a7dedc357461aeb186874cc7c0')
version('1.32', sha256='0890c1f00d4817279be51602e67c4805daf264092adc58f9c04338566e8225ba')
# Commented out until package dblatex has been created
@@ -26,7 +27,7 @@ class GtkDoc(AutotoolsPackage):
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('m4', type='build')
- depends_on('pkgconfig', type='build')
+ depends_on('pkgconfig@0.19:', type='build')
depends_on('python@3.2:', type=('build', 'run'))
depends_on('py-pygments', type=('build', 'run'))
@@ -35,7 +36,7 @@ class GtkDoc(AutotoolsPackage):
depends_on('py-parameterized', type=('test'))
depends_on('py-six', type=('test'))
depends_on('libxslt')
- depends_on('libxml2')
+ depends_on('libxml2@2.3.6:')
depends_on('docbook-xsl@1.78.1')
depends_on('docbook-xml@4.3')
# depends_on('dblatex', when='+pdf')
@@ -57,8 +58,13 @@ class GtkDoc(AutotoolsPackage):
def url_for_version(self, version):
"""Handle gnome's version-based custom URLs."""
- url = 'https://gitlab.gnome.org/GNOME/gtk-doc/-/archive/GTK_DOC_{0}/gtk-doc-GTK_DOC_{0}.tar.gz'
- return url.format(version.underscored)
+
+ if version <= Version('1.32'):
+ url = 'https://gitlab.gnome.org/GNOME/gtk-doc/-/archive/GTK_DOC_{0}/gtk-doc-GTK_DOC_{0}.tar.gz'
+ return url.format(version.underscored)
+
+ url = 'https://gitlab.gnome.org/GNOME/gtk-doc/-/archive/{0}/gtk-doc-{0}.tar.gz'
+ return url.format(version)
def configure_args(self):
args = [
diff --git a/var/spack/repos/builtin/packages/gtkplus/package.py b/var/spack/repos/builtin/packages/gtkplus/package.py
index 8bb2a781eb..270b628efa 100644
--- a/var/spack/repos/builtin/packages/gtkplus/package.py
+++ b/var/spack/repos/builtin/packages/gtkplus/package.py
@@ -61,7 +61,7 @@ class Gtkplus(MesonPackage):
'', 'configure', string=True)
# https://gitlab.gnome.org/GNOME/gtk/-/issues/3776
- if self.spec.satisfies('%gcc@11:'):
+ if self.spec.satisfies('@3:%gcc@11:'):
filter_file(" '-Werror=array-bounds',",
'', 'meson.build', string=True)
@@ -114,7 +114,7 @@ class Gtkplus(MesonPackage):
@when('@:3.20.10')
def meson(self, spec, prefix):
- configure(*self.configure_args)
+ configure(*self.configure_args())
@when('@:3.20.10')
def build(self, spec, prefix):
@@ -123,3 +123,7 @@ class Gtkplus(MesonPackage):
@when('@:3.20.10')
def install(self, spec, prefix):
make('install')
+
+ def check(self):
+ """All build time checks open windows in the X server, don't do that"""
+ pass
diff --git a/var/spack/repos/builtin/packages/harfbuzz/package.py b/var/spack/repos/builtin/packages/harfbuzz/package.py
index fa384bca57..227c9ccb2a 100644
--- a/var/spack/repos/builtin/packages/harfbuzz/package.py
+++ b/var/spack/repos/builtin/packages/harfbuzz/package.py
@@ -9,9 +9,10 @@ from spack import *
class Harfbuzz(AutotoolsPackage):
"""The Harfbuzz package contains an OpenType text shaping engine."""
homepage = "https://github.com/harfbuzz/harfbuzz"
- url = "https://www.freedesktop.org/software/harfbuzz/release/harfbuzz-0.9.37.tar.bz2"
+ url = "https://github.com/harfbuzz/harfbuzz/releases/download/2.9.1/harfbuzz-2.9.1.tar.xz"
git = "https://github.com/harfbuzz/harfbuzz.git"
+ version('2.9.1', sha256='0edcc980f526a338452180e701d6aba6323aef457b6686976a7d17ccbddc51cf')
version('2.6.8', sha256='6648a571a27f186e47094121f0095e1b809e918b3037c630c7f38ffad86e3035')
version('2.3.1', sha256='f205699d5b91374008d6f8e36c59e419ae2d9a7bb8c5d9f34041b9a5abcae468')
version('2.1.3', sha256='613264460bb6814c3894e3953225c5357402915853a652d40b4230ce5faf0bee')
diff --git a/var/spack/repos/builtin/packages/harminv/package.py b/var/spack/repos/builtin/packages/harminv/package.py
index 17f7a3cee9..42a2f6d96c 100644
--- a/var/spack/repos/builtin/packages/harminv/package.py
+++ b/var/spack/repos/builtin/packages/harminv/package.py
@@ -24,9 +24,11 @@ class Harminv(AutotoolsPackage):
def configure_args(self):
spec = self.spec
+ lapack = spec['lapack'].libs
+ blas = spec['blas'].libs
return [
'--enable-shared',
- '--with-blas={0}'.format(spec['blas'].prefix.lib),
- '--with-lapack={0}'.format(spec['lapack'].prefix.lib),
+ '--with-blas={0}'.format(blas.ld_flags),
+ '--with-lapack={0}'.format(lapack.ld_flags),
]
diff --git a/var/spack/repos/builtin/packages/hdf-eos5/package.py b/var/spack/repos/builtin/packages/hdf-eos5/package.py
index 6d20fce1a0..7e8956de62 100644
--- a/var/spack/repos/builtin/packages/hdf-eos5/package.py
+++ b/var/spack/repos/builtin/packages/hdf-eos5/package.py
@@ -97,7 +97,7 @@ class HdfEos5(AutotoolsPackage):
extra_args.append('--with-hdf5={0}'.format(self.spec['hdf5'].prefix))
if 'szip' in self.spec:
extra_args.append('--with-szlib={0}'.format(
- self.spec['libszip'].prefix))
+ self.spec['szip'].prefix))
if 'zlib' in self.spec:
extra_args.append('--with-zlib={0}'.format(
self.spec['zlib'].prefix))
diff --git a/var/spack/repos/builtin/packages/hdf5-vol-async/package.py b/var/spack/repos/builtin/packages/hdf5-vol-async/package.py
new file mode 100644
index 0000000000..c1ecfc4c85
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hdf5-vol-async/package.py
@@ -0,0 +1,26 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Hdf5VolAsync(CMakePackage):
+ """This package enables asynchronous IO in HDF5."""
+
+ homepage = "https://sdm.lbl.gov/"
+ git = "https://github.com/hpc-io/vol-async"
+ maintainers = ['hyoklee']
+
+    version('v1.0', tag='v1.0')
+ depends_on('argobots@main')
+ depends_on('hdf5@develop-1.13+mpi+threadsafe')
+
+ def cmake_args(self):
+ """Populate cmake arguments for HDF5 VOL."""
+ args = [
+            self.define('BUILD_SHARED_LIBS', True),
+            self.define('BUILD_TESTING', self.run_tests)
+ ]
+ return args
diff --git a/var/spack/repos/builtin/packages/hdf5-vol-external-passthrough/package.py b/var/spack/repos/builtin/packages/hdf5-vol-external-passthrough/package.py
new file mode 100644
index 0000000000..ba06dad2d4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hdf5-vol-external-passthrough/package.py
@@ -0,0 +1,26 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Hdf5VolExternalPassthrough(CMakePackage):
+ """Package for HDF5 external pass-through VOL."""
+
+ homepage = "https://sdm.lbl.gov/"
+ url = "https://github.com/hpc-io/vol-external-passthrough/archive/refs/tags/v1.0.tar.gz"
+ git = "https://github.com/hpc-io/vol-external-passthrough.git"
+ maintainers = ['hyoklee']
+
+ version('develop', branch='develop')
+ version('1.0', sha256='99a06d1c31451f8f0c8c10fec112410cda1f951f0eda1bd0ca999d6b35cf7740')
+ depends_on('hdf5@1.13.0:')
+
+ def cmake_args(self):
+ args = [
+            self.define('BUILD_SHARED_LIBS', True),
+            self.define('BUILD_TESTING', self.run_tests)
+ ]
+ return args
diff --git a/var/spack/repos/builtin/packages/hdf5-vol-log/package.py b/var/spack/repos/builtin/packages/hdf5-vol-log/package.py
index c65ff31628..0af7af27cf 100644
--- a/var/spack/repos/builtin/packages/hdf5-vol-log/package.py
+++ b/var/spack/repos/builtin/packages/hdf5-vol-log/package.py
@@ -15,9 +15,9 @@ class Hdf5VolLog(AutotoolsPackage):
git = 'https://github.com/DataLib-ECP/vol-log-based.git'
maintainers = ['hyoklee']
- version('master', commit='b13778efd9e0c79135a9d7352104985408078d45')
+ version('master', commit='28b854e50c53166010d97eccdc23f7f3ef6a5b03')
- depends_on('hdf5@1.12.1:')
+ depends_on('hdf5@1.13.0:')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py
index 26db199cd9..96367cb23b 100644
--- a/var/spack/repos/builtin/packages/hdf5/package.py
+++ b/var/spack/repos/builtin/packages/hdf5/package.py
@@ -17,7 +17,7 @@ class Hdf5(CMakePackage):
"""
homepage = "https://portal.hdfgroup.org"
- url = "https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.7/src/hdf5-1.10.7.tar.gz"
+ url = "https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.8/src/hdf5-1.10.8.tar.gz"
list_url = "https://support.hdfgroup.org/ftp/HDF5/releases"
list_depth = 3
git = "https://github.com/HDFGroup/hdf5.git"
@@ -35,12 +35,14 @@ class Hdf5(CMakePackage):
version('develop-1.10', branch='hdf5_1_10')
version('develop-1.8', branch='hdf5_1_8')
+ version('1.13.0', sha256='3049faf900f0c52e09ea4cddfb83af057615f2fc1cc80eb5202dd57b09820115')
version('1.12.1', sha256='79c66ff67e666665369396e9c90b32e238e501f345afd2234186bfb8331081ca')
version('1.12.0', sha256='a62dcb276658cb78e6795dd29bf926ed7a9bc4edf6e77025cd2c689a8f97c17a')
# HDF5 1.12 broke API compatibility, so we currently prefer the latest
# 1.10 release. packages that want later versions of HDF5 should specify,
# e.g., depends_on("hdf5@1.12:") to get 1.12 or higher.
- version('1.10.7', sha256='7a1a0a54371275ce2dfc5cd093775bb025c365846512961e7e5ceaecb437ef15', preferred=True)
+ version('1.10.8', sha256='d341b80d380dd763753a0ebe22915e11e87aac4e44a084a850646ff934d19c80', preferred=True)
+ version('1.10.7', sha256='7a1a0a54371275ce2dfc5cd093775bb025c365846512961e7e5ceaecb437ef15')
version('1.10.6', sha256='5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa')
version('1.10.5', sha256='6d4ce8bf902a97b050f6f491f4268634e252a63dadd6656a1a9be5b7b7726fa8')
version('1.10.4', sha256='8f60dc4dd6ab5fcd23c750d1dc5bca3d0453bdce5c8cdaf0a4a61a9d1122adb2')
diff --git a/var/spack/repos/builtin/packages/helib/package.py b/var/spack/repos/builtin/packages/helib/package.py
new file mode 100644
index 0000000000..07d141ff54
--- /dev/null
+++ b/var/spack/repos/builtin/packages/helib/package.py
@@ -0,0 +1,51 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Helib(CMakePackage):
+ """HElib is an open-source (Apache License v2.0) software library that
+ implements homomorphic encryption (HE).
+
+ Currently available schemes are the implementations of the
+ Brakerski-Gentry-Vaikuntanathan (BGV) scheme
+ with bootstrapping and the Approximate Number scheme of Cheon-Kim-Kim-Song
+ (CKKS), along with many optimizations to make homomorphic evaluation run
+ faster, focusing mostly on effective use of the Smart-Vercauteren
+ ciphertext packing techniques and the Gentry-Halevi-Smart optimizations.
+ """
+
+ homepage = "https://github.com/homenc/HElib"
+ url = "https://github.com/homenc/HElib/archive/refs/tags/v2.2.1.tar.gz"
+
+ maintainers = ['wohlbier']
+
+ version('2.2.1', sha256='cbe030c752c915f1ece09681cadfbe4f140f6752414ab000b4cf076b6c3019e4')
+ version('2.2.0', sha256='e5f82fb0520a76eafdf5044a1f17f512999479d899da8c34335da5e193699b94')
+ version('2.1.0', sha256='641af0f602cfc7f5f5b1cfde0652252def2dfaf5f7962c2595cf598663637951')
+ version('2.0.0', sha256='4e371807fe052ca27dce708ea302495a8dae8d1196e16e86df424fb5b0e40524')
+ version('1.3.1', sha256='8ef47092f6b15fbb484a21f9184e7d936c360198515b6efb9a55d3dfbc2ea4be')
+ version('1.3.0', sha256='9f69dc5be9197f9ab8cdd81af9a59c12968a0ee11d595b1b1438707ff5405694')
+ version('1.2.0', sha256='17e0448a3255ab01a1ebd8382f9d08a318e3d192b56d062a1fd65fbb0aadaf67')
+ version('1.1.0-beta.0', sha256='6a454b029f3805101f714f50ae5199e2b2b86c1e520a659f130837810eabe4b5')
+ version('1.1.0', sha256='77a912ed3c86f8bde31b7d476321d0c2d810570c04a60fa95c4bd32a1955b5cf')
+ version('1.0.2', sha256='b907eaa8381af3d001d7fb8383273f4c652415b3320c11d5be2ad8f19757c998')
+
+ variant('shared', default=False, description='Build shared library.')
+ depends_on('gmp@6.2.1:')
+ depends_on('ntl@11.5.1:')
+ depends_on('ntl+shared', when='+shared')
+
+ def cmake_args(self):
+ spec = self.spec
+ args = [
+ self.define('ENABLE_TEST', 'ON'),
+ self.define('GMP_DIR', spec['gmp'].prefix),
+ self.define('NTL_DIR', spec['ntl'].prefix),
+ self.define_from_variant('BUILD_SHARED', 'shared')
+ ]
+
+ return args
diff --git a/var/spack/repos/builtin/packages/hiop/package.py b/var/spack/repos/builtin/packages/hiop/package.py
index ff76861362..19c774a225 100644
--- a/var/spack/repos/builtin/packages/hiop/package.py
+++ b/var/spack/repos/builtin/packages/hiop/package.py
@@ -6,7 +6,7 @@
from spack import *
-class Hiop(CMakePackage, CudaPackage):
+class Hiop(CMakePackage, CudaPackage, ROCmPackage):
"""HiOp is an optimization solver for solving certain mathematical
optimization problems expressed as nonlinear programming problems.
HiOp is a lightweight HPC solver that leverages application"s existing
@@ -18,6 +18,8 @@ class Hiop(CMakePackage, CudaPackage):
maintainers = ['ashermancinelli', 'CameronRutherford']
# Most recent tagged snapshot is the preferred version when profiling.
+ version('0.5.3', commit='698e8d0fdc0ff9975d8714339ff8c782b70d85f9')
+ version('0.5.2', commit='662ad76dee1f501f648a8bec9a490cb5881789e9')
version('0.5.1', commit='6789bbb55824e68e428c2df1009d647af81f9cf1')
version('0.5.0', commit='a39da8025037c7c8ae2eb31234eb80cc73bec2af')
version('0.4.6', commit='b72d163d52c9225c3196ceb2baebdc7cf09a69de')
@@ -60,15 +62,24 @@ class Hiop(CMakePackage, CudaPackage):
depends_on('mpi', when='+mpi')
- depends_on('magma', when='+cuda')
+ depends_on('magma+cuda', when='+cuda')
+ depends_on('magma+rocm', when='+rocm')
depends_on('magma@2.5.4:', when='@0.4:+cuda')
depends_on('magma@2.6.1:', when='@0.4.6:+cuda')
+ depends_on('magma@2.5.4:', when='@0.4:+rocm')
+ depends_on('magma@2.6.1:', when='@0.4.6:+rocm')
depends_on('raja+openmp', when='+raja')
depends_on('raja@0.14.0:', when='@0.5.0:+raja')
depends_on('raja+cuda', when='+raja+cuda')
+ depends_on('raja+rocm', when='+raja+rocm')
depends_on('umpire', when='+raja')
depends_on('umpire+cuda~shared', when='+raja+cuda')
+ depends_on('umpire+rocm', when='+raja+rocm')
+ depends_on('umpire@6.0.0:', when='@0.5.0:+raja')
+ depends_on('hip', when='+rocm')
+ depends_on('hipblas', when='+rocm')
+ depends_on('hipsparse', when='+rocm')
depends_on('suite-sparse', when='+kron')
@@ -87,6 +98,10 @@ class Hiop(CMakePackage, CudaPackage):
args = []
spec = self.spec
+ if spec.satisfies('+rocm') or spec.satisfies('+cuda'):
+ args.append('-DHIOP_USE_GPU=ON')
+ args.append('-DHIOP_USE_MAGMA=ON')
+
args.extend([
self.define('HIOP_BUILD_STATIC', True),
self.define('LAPACK_FOUND', True),
@@ -95,9 +110,8 @@ class Hiop(CMakePackage, CudaPackage):
self.define_from_variant('HIOP_BUILD_SHARED', 'shared'),
self.define_from_variant('HIOP_USE_MPI', 'mpi'),
self.define_from_variant('HIOP_DEEPCHECKS', 'deepchecking'),
- self.define_from_variant('HIOP_USE_GPU', 'cuda'),
self.define_from_variant('HIOP_USE_CUDA', 'cuda'),
- self.define_from_variant('HIOP_USE_MAGMA', 'cuda'),
+ self.define_from_variant('HIOP_USE_HIP', 'rocm'),
self.define_from_variant('HIOP_USE_RAJA', 'raja'),
self.define_from_variant('HIOP_USE_UMPIRE', 'raja'),
self.define_from_variant('HIOP_WITH_KRON_REDUCTION', 'kron'),
diff --git a/var/spack/repos/builtin/packages/hipblas/link-clients-blas.patch b/var/spack/repos/builtin/packages/hipblas/link-clients-blas.patch
new file mode 100644
index 0000000000..18e599d9ab
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hipblas/link-clients-blas.patch
@@ -0,0 +1,24 @@
+diff -r -u a/clients/benchmarks/CMakeLists.txt b/clients/benchmarks/CMakeLists.txt
+--- a/clients/benchmarks/CMakeLists.txt 2021-10-28 14:14:41.379987882 -0600
++++ b/clients/benchmarks/CMakeLists.txt 2021-10-28 13:37:50.409696036 -0600
+@@ -53,7 +53,7 @@
+ $<BUILD_INTERFACE:${HIP_INCLUDE_DIRS}>
+ )
+
+-target_link_libraries( hipblas-bench PRIVATE hipblas_fortran_client roc::hipblas cblas lapack)
++target_link_libraries( hipblas-bench PRIVATE hipblas_fortran_client roc::hipblas cblas lapack blas)
+
+ # need mf16c flag for float->half convertion
+ target_compile_options( hipblas-bench PRIVATE -mf16c)
+diff -r -u a/clients/gtest/CMakeLists.txt b/clients/gtest/CMakeLists.txt
+--- a/clients/gtest/CMakeLists.txt 2021-10-28 14:14:41.379987882 -0600
++++ b/clients/gtest/CMakeLists.txt 2021-10-28 13:37:34.609274623 -0600
+@@ -146,7 +146,7 @@
+ ${ROCM_PATH}/hsa/include
+ )
+
+-target_link_libraries( hipblas-test PRIVATE roc::hipblas cblas lapack ${GTEST_LIBRARIES} ${Boost_LIBRARIES} hipblas_fortran_client )
++target_link_libraries( hipblas-test PRIVATE roc::hipblas cblas lapack blas ${GTEST_LIBRARIES} ${Boost_LIBRARIES} hipblas_fortran_client )
+
+ # need mf16c flag for float->half convertion
+ target_compile_options( hipblas-test PRIVATE -mf16c )
diff --git a/var/spack/repos/builtin/packages/hipblas/package.py b/var/spack/repos/builtin/packages/hipblas/package.py
index e211ea6980..e56c6aa432 100644
--- a/var/spack/repos/builtin/packages/hipblas/package.py
+++ b/var/spack/repos/builtin/packages/hipblas/package.py
@@ -29,6 +29,16 @@ class Hipblas(CMakePackage):
variant('build_type', default='Release', values=("Release", "Debug", "RelWithDebInfo"), description='CMake build type')
+ depends_on('googletest@1.10.0:', type='test')
+ depends_on('netlib-lapack@3.7.1:', type='test')
+ depends_on('boost@1.64.0: cxxstd=14', type='test')
+
+ patch('link-clients-blas.patch', when='@4.3.0:')
+
+ def check(self):
+ exe = join_path(self.build_directory, 'clients', 'staging', 'hipblas-test')
+ self.run_test(exe)
+
for ver in ['3.5.0', '3.7.0', '3.8.0', '3.9.0', '3.10.0', '4.0.0', '4.1.0',
'4.2.0', '4.3.0', '4.3.1']:
depends_on('hip@' + ver, when='@' + ver)
@@ -42,7 +52,7 @@ class Hipblas(CMakePackage):
# Make sure find_package(HIP) finds the module.
self.define('CMAKE_MODULE_PATH', self.spec['hip'].prefix.cmake),
self.define('BUILD_CLIENTS_SAMPLES', 'OFF'),
- self.define('BUILD_CLIENTS_TESTS', 'OFF')
+ self.define('BUILD_CLIENTS_TESTS', self.run_tests)
]
# hipblas actually prefers CUDA over AMD GPUs when you have it
diff --git a/var/spack/repos/builtin/packages/hpcg/package.py b/var/spack/repos/builtin/packages/hpcg/package.py
index faf22d712b..9f3fccd645 100644
--- a/var/spack/repos/builtin/packages/hpcg/package.py
+++ b/var/spack/repos/builtin/packages/hpcg/package.py
@@ -33,8 +33,12 @@ class Hpcg(AutotoolsPackage):
def configure(self, spec, prefix):
CXXFLAGS = '-O3 -ffast-math -ftree-vectorize '
- if '%aocc' not in self.spec:
+ if not spec.satisfies('%aocc') and not spec.satisfies('%cce'):
CXXFLAGS += ' -ftree-vectorizer-verbose=0 '
+ if spec.satisfies('%cce'):
+ CXXFLAGS += ' -Rpass=loop-vectorize'
+ CXXFLAGS += ' -Rpass-missed=loop-vectorize'
+ CXXFLAGS += ' -Rpass-analysis=loop-vectorize '
if '+openmp' in self.spec:
CXXFLAGS += self.compiler.openmp_flag
config = [
diff --git a/var/spack/repos/builtin/packages/hpctoolkit/package.py b/var/spack/repos/builtin/packages/hpctoolkit/package.py
index 42eaf1aba9..f7d188932b 100644
--- a/var/spack/repos/builtin/packages/hpctoolkit/package.py
+++ b/var/spack/repos/builtin/packages/hpctoolkit/package.py
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import llnl.util.tty as tty
+
from spack import *
@@ -20,6 +22,8 @@ class Hpctoolkit(AutotoolsPackage):
tags = ['e4s']
+ test_requires_compiler = True
+
version('develop', branch='develop')
version('master', branch='master')
version('2021.10.15', commit='a8f289e4dc87ff98e05cfc105978c09eb2f5ea16')
@@ -202,3 +206,36 @@ class Hpctoolkit(AutotoolsPackage):
if '+viewer' in spec:
env.prepend_path('PATH', spec['hpcviewer'].prefix.bin)
env.prepend_path('MANPATH', spec['hpcviewer'].prefix.share.man)
+
+ # Build tests (spack install --run-tests). Disable the default
+ # spack tests and run autotools 'make check', but only from the
+ # tests directory.
+ build_time_test_callbacks = []
+ install_time_test_callbacks = []
+
+ @run_after('install')
+ @on_package_attributes(run_tests=True)
+ def check_install(self):
+ if self.spec.satisfies('@master'):
+ with working_dir('tests'):
+ make('check')
+ else:
+ tty.warn('spack test for hpctoolkit requires branch master')
+
+ # Post-Install tests (spack test run). These are the same tests
+ # but with a different Makefile that works outside the build
+ # directory.
+ @run_after('install')
+ def copy_test_files(self):
+ if self.spec.satisfies('@master'):
+ self.cache_extra_test_sources(['tests'])
+
+ def test(self):
+ test_dir = join_path(self.test_suite.current_test_cache_dir, 'tests')
+ if self.spec.satisfies('@master'):
+ with working_dir(test_dir):
+ make('-f', 'Makefile.spack', 'all')
+ self.run_test('./run-sort', status=[0], installed=False,
+ purpose='selection sort unit test')
+ else:
+ tty.warn('spack test for hpctoolkit requires branch master')
diff --git a/var/spack/repos/builtin/packages/hpx/package.py b/var/spack/repos/builtin/packages/hpx/package.py
index ab3160fc12..fb062de429 100644
--- a/var/spack/repos/builtin/packages/hpx/package.py
+++ b/var/spack/repos/builtin/packages/hpx/package.py
@@ -146,6 +146,11 @@ class Hpx(CMakePackage, CudaPackage, ROCmPackage):
# Patches and one-off conflicts
+ # Boost and HIP don't work together in certain versions:
+ # https://github.com/boostorg/config/issues/392. Boost 1.78.0 and HPX 1.8.0
+ # both include a fix.
+ conflicts("boost@:1.77.0", when="@:1.7 +rocm")
+
# boost 1.73.0 build problem with HPX 1.4.0 and 1.4.1
# https://github.com/STEllAR-GROUP/hpx/issues/4728#issuecomment-640685308
depends_on('boost@:1.72.0', when='@:1.4')
diff --git a/var/spack/repos/builtin/packages/htslib/package.py b/var/spack/repos/builtin/packages/htslib/package.py
index ab451ac1fc..8c998b1863 100644
--- a/var/spack/repos/builtin/packages/htslib/package.py
+++ b/var/spack/repos/builtin/packages/htslib/package.py
@@ -12,6 +12,7 @@ class Htslib(AutotoolsPackage):
homepage = "https://github.com/samtools/htslib"
url = "https://github.com/samtools/htslib/releases/download/1.13/htslib-1.13.tar.bz2"
+ version('1.14', sha256='ed221b8f52f4812f810eebe0cc56cd8355a5c9d21c62d142ac05ad0da147935f')
version('1.13', sha256='f2407df9f97f0bb6b07656579e41a1ca5100464067b6b21bf962a2ea4b0efd65')
version('1.12', sha256='2280141b46e953ba4ae01b98335a84f8e6ccbdb6d5cdbab7f70ee4f7e3b6f4ca')
version('1.10.2', sha256='e3b543de2f71723830a1e0472cf5489ec27d0fbeb46b1103e14a11b7177d1939')
diff --git a/var/spack/repos/builtin/packages/hwloc/package.py b/var/spack/repos/builtin/packages/hwloc/package.py
index a3a8597236..3efa4cd5a2 100644
--- a/var/spack/repos/builtin/packages/hwloc/package.py
+++ b/var/spack/repos/builtin/packages/hwloc/package.py
@@ -135,6 +135,14 @@ class Hwloc(AutotoolsPackage):
if '+opencl' not in self.spec:
args.append('--disable-opencl')
+ # If ROCm libraries are found in system /opt/rocm
+ # during config stage, hwloc builds itself with
+ # librocm_smi support.
+ # This can fail the config tests while building
+ # OpenMPI due to lack of rpath to librocm_smi
+ if '+rocm' not in self.spec:
+ args.append('--disable-rsmi')
+
if '+netloc' in self.spec:
args.append('--enable-netloc')
diff --git a/var/spack/repos/builtin/packages/imgui/package.py b/var/spack/repos/builtin/packages/imgui/package.py
new file mode 100644
index 0000000000..833a04a6f2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/imgui/package.py
@@ -0,0 +1,24 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Imgui(Package):
+ """Dear ImGui is a bloat-free graphical user interface library for C++.
+
+ It outputs optimized vertex buffers that you can render anytime in your 3D-pipeline
+ enabled application. It is fast, portable, renderer agnostic and self-contained
+ (no external dependencies)."""
+
+ homepage = "https://github.com/ocornut/imgui"
+ url = "https://github.com/ocornut/imgui/archive/refs/tags/v1.85.tar.gz"
+
+ version('1.85', sha256='7ed49d1f4573004fa725a70642aaddd3e06bb57fcfe1c1a49ac6574a3e895a77')
+
+ def install(self, spec, prefix):
+ # No specific build process is required.
+ # You can add the .cpp files to your existing project.
+ install_tree('.', prefix)
diff --git a/var/spack/repos/builtin/packages/intel-daal/package.py b/var/spack/repos/builtin/packages/intel-daal/package.py
index df48989c02..a979848fb7 100644
--- a/var/spack/repos/builtin/packages/intel-daal/package.py
+++ b/var/spack/repos/builtin/packages/intel-daal/package.py
@@ -9,6 +9,8 @@ from spack import *
class IntelDaal(IntelPackage):
"""Intel Data Analytics Acceleration Library."""
+ maintainers = ['rscohn2']
+
homepage = "https://software.intel.com/en-us/daal"
version('2020.2.254', sha256='08528bc150dad312ff2ae88ce12d6078ed8ba2f378f4bf3daf0fbbb9657dce1e',
diff --git a/var/spack/repos/builtin/packages/intel-llvm/package.py b/var/spack/repos/builtin/packages/intel-llvm/package.py
index f14094eba4..b6df7c12e9 100644
--- a/var/spack/repos/builtin/packages/intel-llvm/package.py
+++ b/var/spack/repos/builtin/packages/intel-llvm/package.py
@@ -10,6 +10,8 @@ class IntelLlvm(CMakePackage):
"""Intel's version of the LLVM compiler.
"""
+ maintainers = ['rscohn2']
+
homepage = 'https://github.com/intel/llvm'
git = 'https://github.com/intel/llvm.git'
diff --git a/var/spack/repos/builtin/packages/intel-mkl/package.py b/var/spack/repos/builtin/packages/intel-mkl/package.py
index d7050b03b7..be2e7368fa 100644
--- a/var/spack/repos/builtin/packages/intel-mkl/package.py
+++ b/var/spack/repos/builtin/packages/intel-mkl/package.py
@@ -11,6 +11,8 @@ from spack import *
class IntelMkl(IntelPackage):
"""Intel Math Kernel Library."""
+ maintainers = ['rscohn2']
+
homepage = "https://software.intel.com/en-us/intel-mkl"
version('2020.4.304', sha256='2314d46536974dbd08f2a4e4f9e9a155dc7e79e2798c74e7ddfaad00a5917ea5',
diff --git a/var/spack/repos/builtin/packages/intel-mpi/package.py b/var/spack/repos/builtin/packages/intel-mpi/package.py
index e8e20af1cb..91eaf9b4a6 100644
--- a/var/spack/repos/builtin/packages/intel-mpi/package.py
+++ b/var/spack/repos/builtin/packages/intel-mpi/package.py
@@ -9,6 +9,8 @@ from spack import *
class IntelMpi(IntelPackage):
"""Intel MPI"""
+ maintainers = ['rscohn2']
+
homepage = "https://software.intel.com/en-us/intel-mpi-library"
version('2019.10.317', sha256='28e1b615e63d2170a99feedc75e3b0c5a7e1a07dcdaf0a4181831b07817a5346',
@@ -57,6 +59,9 @@ class IntelMpi(IntelPackage):
provides('mpi')
+ variant('external-libfabric', default=False, description='Enable external libfabric dependency')
+ depends_on('libfabric', when='+external-libfabric', type=('build', 'link', 'run'))
+
def setup_dependent_build_environment(self, *args):
# Handle in callback, conveying client's compilers in additional arg.
# CAUTION - DUP code in:
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py
index d85c5f3358..bf9bed4497 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py
@@ -19,6 +19,10 @@ class IntelOneapiAdvisor(IntelOneApiPackage):
homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/advisor.html'
if platform.system() == 'Linux':
+ version('2022.0.0',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18369/l_oneapi_advisor_p_2022.0.0.92_offline.sh',
+ sha256='f1c4317c2222c56fb2e292513f7eec7ec27eb1049d3600cb975bc08ed1477993',
+ expand=False)
version('2021.4.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18220/l_oneapi_advisor_p_2021.4.0.389_offline.sh',
sha256='dd948f7312629d9975e12a57664f736b8e011de948771b4c05ad444438532be8',
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py
index 1806b7ff40..1e4d1f8ee6 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py
@@ -18,6 +18,10 @@ class IntelOneapiCcl(IntelOneApiLibraryPackage):
depends_on('intel-oneapi-mpi')
if platform.system() == 'Linux':
+ version('2021.5.0',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18371/l_oneapi_ccl_p_2021.5.0.478_offline.sh',
+ sha256='47584ad0269fd13bcfbc2cd0bb029bdcc02b723070abcb3d5e57f9586f4e74f8',
+ expand=False)
version('2021.4.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18188/l_oneapi_ccl_p_2021.4.0.433_offline.sh',
sha256='004031629d97ef99267d8ea962b666dc4be1560d7d32bd510f97bc81d9251ef6',
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py
index cc53b757d2..debac97dd3 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py
@@ -22,6 +22,16 @@ class IntelOneapiCompilers(IntelOneApiPackage):
depends_on('patchelf', type='build')
if platform.system() == 'Linux':
+ version('2022.0.1',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18435/l_dpcpp-cpp-compiler_p_2022.0.1.71_offline.sh',
+ sha256='c7cddc64c3040eece2dcaf48926ba197bb27e5a46588b1d7b3beddcdc379926a',
+ expand=False)
+ resource(name='fortran-installer',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18436/l_fortran-compiler_p_2022.0.1.70_offline.sh',
+ sha256='2cb28a04f93554bfeffd6cad8bd0e7082735f33d73430655dea86df8933f50d1',
+ expand=False,
+ placement='fortran-installer',
+ when='@2022.0.1')
version('2021.4.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18209/l_dpcpp-cpp-compiler_p_2021.4.0.3201_offline.sh',
sha256='9206bff1c2fdeb1ca0d5f79def90dcf3e6c7d5711b9b5adecd96a2ba06503828',
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py
index b6c48132ea..5e3f2b5c51 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py
@@ -17,6 +17,10 @@ class IntelOneapiDal(IntelOneApiLibraryPackage):
homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onedal.html'
if platform.system() == 'Linux':
+ version('2021.5.1',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18432/l_daal_oneapi_p_2021.5.1.803_offline.sh',
+ sha256='bba7bee3caef14fbb54ad40615222e5da429496455edf7375f11fd84a72c87ba',
+ expand=False)
version('2021.4.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18218/l_daal_oneapi_p_2021.4.0.729_offline.sh',
sha256='61da9d2a40c75edadff65d052fd84ef3db1da5d94f86ad3956979e6988549dda',
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py
index 123ca60e1a..0bc921501a 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py
@@ -17,6 +17,10 @@ class IntelOneapiDnn(IntelOneApiLibraryPackage):
homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onednn.html'
if platform.system() == 'Linux':
+ version('2022.0.1',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18441/l_onednn_p_2022.0.1.26_offline.sh',
+ sha256='8339806300d83d2629952e6e2f2758b52f517c072a20b7b7fc5642cf1e2a5410',
+ expand=False)
version('2021.4.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18221/l_onednn_p_2021.4.0.467_offline.sh',
sha256='30cc601467f6a94b3d7e14f4639faf0b12fdf6d98df148b07acdb4dfdfb971db',
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py
index 954c57586b..68c2de4a33 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py
@@ -17,6 +17,10 @@ class IntelOneapiDpl(IntelOneApiLibraryPackage):
homepage = 'https://github.com/oneapi-src/oneDPL'
if platform.system() == 'Linux':
+ version('2021.6.0',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18372/l_oneDPL_p_2021.6.0.501_offline.sh',
+ sha256='0225f133a6c38b36d08635986870284a958e5286c55ca4b56a4058bd736f8f4f',
+ expand=False)
version('2021.5.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18189/l_oneDPL_p_2021.5.0.445_offline.sh',
sha256='7d4adf300a18f779c3ab517070c61dba10e3952287d5aef37c38f739e9041a68',
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py
index 60c853886a..bcf5d64cd0 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py
@@ -17,6 +17,10 @@ class IntelOneapiInspector(IntelOneApiPackage):
homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/inspector.html'
if platform.system() == 'Linux':
+ version('2022.0.0',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18363/l_inspector_oneapi_p_2022.0.0.56_offline.sh',
+ sha256='79a0eb2ae3f1de1e3456076685680c468702922469c3fda3e074718fb0bea741',
+ expand=False)
version('2021.4.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18239/l_inspector_oneapi_p_2021.4.0.266_offline.sh',
sha256='c8210cbcd0e07cc75e773249a5e4a02cf34894ec80a213939f3a20e6c5705274',
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py
index c6c4de62e6..0d36f01ee3 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py
@@ -16,6 +16,10 @@ class IntelOneapiIpp(IntelOneApiLibraryPackage):
homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/ipp.html'
if platform.system() == 'Linux':
+ version('2021.5.1',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18440/l_ipp_oneapi_p_2021.5.1.522_offline.sh',
+ sha256='be99f9b0b2cc815e017188681ab997f3ace94e3010738fa6f702f2416dac0de4',
+ expand=False)
version('2021.4.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18219/l_ipp_oneapi_p_2021.4.0.459_offline.sh',
sha256='1a7a8fe5502ae61c10f5c432b7662c6fa542e5832a40494eb1c3a2d8e27c9f3e',
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py
index 51b3b807c7..725a718fc0 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py
@@ -17,6 +17,10 @@ class IntelOneapiIppcp(IntelOneApiLibraryPackage):
homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/ipp.html'
if platform.system() == 'Linux':
+ version('2021.5.0',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18364/l_ippcp_oneapi_p_2021.5.0.445_offline.sh',
+ sha256='e71aee288cc970b9c9fe21f7d5c300dbc2a4ea0687c7028f200d6b87e6c895a1',
+ expand=False)
version('2021.4.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18187/l_ippcp_oneapi_p_2021.4.0.401_offline.sh',
sha256='2ca2320f733ee75b4a27865185a1b0730879fe2c47596e570b1bd50d0b8ac608',
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py
index 2a5e283602..4aa5fb18cf 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py
@@ -17,6 +17,10 @@ class IntelOneapiMkl(IntelOneApiLibraryPackage):
homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onemkl.html'
if platform.system() == 'Linux':
+ version('2022.0.1',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18444/l_onemkl_p_2022.0.1.117_offline.sh',
+ sha256='22afafbe2f3762eca052ac21ec40b845ff2f3646077295c88c2f37f80a0cc160',
+ expand=False)
version('2021.4.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18222/l_onemkl_p_2021.4.0.640_offline.sh',
sha256='9ad546f05a421b4f439e8557fd0f2d83d5e299b0d9bd84bdd86be6feba0c3915',
@@ -34,13 +38,18 @@ class IntelOneapiMkl(IntelOneApiLibraryPackage):
sha256='818b6bd9a6c116f4578cda3151da0612ec9c3ce8b2c8a64730d625ce5b13cc0c',
expand=False)
+ variant('shared', default=True, description='Builds shared library')
variant('ilp64', default=False,
description='Build with ILP64 support')
+ variant('cluster', default=False,
+ description='Build with cluster support: scalapack, blacs, etc')
depends_on('intel-oneapi-tbb')
+ # cluster libraries need mpi
+ depends_on('mpi', when='+cluster')
provides('fftw-api@3')
- provides('scalapack')
+ provides('scalapack', when='+cluster')
provides('mkl')
provides('lapack')
provides('blas')
@@ -59,10 +68,31 @@ class IntelOneapiMkl(IntelOneApiLibraryPackage):
include_path = join_path(self.component_path, 'include')
return find_headers('*', include_path)
+ # provide cluster libraries if +cluster variant is used or
+ # the scalapack virtual package was requested
+ def cluster(self):
+ return '+cluster' in self.spec
+
@property
def libs(self):
- mkl_libs = [self.xlp64_lib('libmkl_intel'), 'libmkl_sequential', 'libmkl_core']
+ shared = '+shared' in self.spec
+ mkl_libs = []
+ if self.cluster():
+ mkl_libs += [self.xlp64_lib('libmkl_scalapack'),
+ 'libmkl_cdft_core']
+ mkl_libs += [self.xlp64_lib('libmkl_intel'),
+ 'libmkl_sequential',
+ 'libmkl_core']
+ if self.cluster():
+ mkl_libs += [self.xlp64_lib('libmkl_blacs_intelmpi')]
libs = find_libraries(mkl_libs,
- join_path(self.component_path, 'lib', 'intel64'))
- libs += find_system_libraries(['libpthread', 'libm', 'libdl'])
- return libs
+ join_path(self.component_path, 'lib', 'intel64'),
+ shared=shared)
+ system_libs = find_system_libraries(['libpthread', 'libm', 'libdl'])
+ if shared:
+ return libs + system_libs
+ else:
+ return IntelOneApiStaticLibraryList(libs, system_libs)
+
+ def setup_dependent_build_environment(self, env, dependent_spec):
+ env.set('MKLROOT', self.component_path)
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py
index 2672613c5a..52977eac3e 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py
@@ -4,9 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import glob
import platform
-import subprocess
from spack import *
@@ -19,6 +17,10 @@ class IntelOneapiMpi(IntelOneApiLibraryPackage):
homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/mpi-library.html'
if platform.system() == 'Linux':
+ version('2021.5.0',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18370/l_mpi_oneapi_p_2021.5.0.495_offline.sh',
+ sha256='3aae53fe77f7c6aac7a32b299c25d6ca9a00ba4e2d512a26edd90811e59e7471',
+ expand=False)
version('2021.4.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18186/l_mpi_oneapi_p_2021.4.0.441_offline.sh',
sha256='cc4b7072c61d0bd02b1c431b22d2ea3b84b967b59d2e587e77a9e7b2c24f2a29',
@@ -38,11 +40,11 @@ class IntelOneapiMpi(IntelOneApiLibraryPackage):
variant('ilp64', default=False,
description='Build with ILP64 support')
+ variant('external-libfabric', default=False, description='Enable external libfabric dependency')
+ depends_on('libfabric', when='+external-libfabric', type=('link', 'run'))
provides('mpi@:3.1')
- depends_on('patchelf', type='build')
-
@property
def component_dir(self):
return 'mpi'
@@ -87,17 +89,19 @@ class IntelOneapiMpi(IntelOneApiLibraryPackage):
libs += find_libraries(['libmpicxx', 'libmpifort'], lib_dir)
libs += find_libraries('libmpi', release_lib_dir)
libs += find_system_libraries(['libdl', 'librt', 'libpthread'])
+
+ # Find libfabric for libmpi.so
+ if '+external-libfabric' in self.spec:
+ libs += self.spec['libfabric'].libs
+ else:
+ libs += find_libraries(['libfabric'],
+ join_path(self.component_path, 'libfabric', 'lib'))
+
return libs
def install(self, spec, prefix):
super(IntelOneapiMpi, self).install(spec, prefix)
- # Patch libmpi.so rpath so it can find libfabric
- libfabric_rpath = join_path(self.component_path, 'libfabric', 'lib')
- for libmpi in glob.glob(join_path(self.component_path,
- 'lib', '**', 'libmpi*.so')):
- subprocess.call(['patchelf', '--set-rpath', libfabric_rpath, libmpi])
-
# When spack builds from source
# fix I_MPI_SUBSTITUTE_INSTALLDIR and
# __EXEC_PREFIX_TO_BE_FILLED_AT_INSTALL_TIME__
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py
index e0af8ccc60..a2b3e4a2e6 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py
@@ -17,6 +17,10 @@ class IntelOneapiTbb(IntelOneApiLibraryPackage):
homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onetbb.html'
if platform.system() == 'Linux':
+ version('2021.5.0',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18380/l_tbb_oneapi_p_2021.5.0.707_offline.sh',
+ sha256='6ff7890a74a43ae02e0fa2d9c5533fce70a49dff8e73278b546a0995367fec5e',
+ expand=False)
version('2021.4.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18194/l_tbb_oneapi_p_2021.4.0.643_offline.sh',
sha256='33332012ff8ffe7987b1a20bea794d76f7d8050ccff04fa6e1990974c336ee24',
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py
index d7f404e419..5a5195972a 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py
@@ -16,6 +16,10 @@ class IntelOneapiVpl(IntelOneApiLibraryPackage):
homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onevpl.html'
if platform.system() == 'Linux':
+ version('2022.0.0',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18375/l_oneVPL_p_2022.0.0.58_offline.sh',
+ sha256='600b8566e1aa523b97291bed6b08f69a04bc7c4c75c035942a64a38f45a1a7f0',
+ expand=False)
version('2021.6.0',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18190/l_oneVPL_p_2021.6.0.458_offline.sh',
sha256='40c50008be3f03d17cc8c0c34324593c1d419ee4c45af5543aa5a2d5fb11071f',
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py
index dd1b0317ad..872d1bdde9 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py
@@ -19,6 +19,10 @@ class IntelOneapiVtune(IntelOneApiPackage):
homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/vtune-profiler.html'
if platform.system() == 'Linux':
+ version('2022.0.0',
+ url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18406/l_oneapi_vtune_p_2022.0.0.94_offline.sh',
+ sha256='aa4d575c22e7be0c950b87d67d9e371f470f682906864c4f9b68e530ecd22bd7',
+ expand=False)
version('2021.7.1',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18086/l_oneapi_vtune_p_2021.7.1.492_offline.sh',
sha256='4cf17078ae6e09f26f70bd9d0b726af234cc30c342ae4a8fda69941b40139b26',
diff --git a/var/spack/repos/builtin/packages/intel-tbb/package.py b/var/spack/repos/builtin/packages/intel-tbb/package.py
index d0a66395d2..eaeac3339e 100644
--- a/var/spack/repos/builtin/packages/intel-tbb/package.py
+++ b/var/spack/repos/builtin/packages/intel-tbb/package.py
@@ -21,6 +21,8 @@ class IntelTbb(CMakePackage):
url_prefix = 'https://github.com/oneapi-src/oneTBB/'
url = url_prefix + 'archive/v2020.1.tar.gz'
+ maintainers = ['rscohn2']
+
# Note: when adding new versions, please check and update the
# patches, filters and url_for_version() below as needed.
diff --git a/var/spack/repos/builtin/packages/interproscan/package.py b/var/spack/repos/builtin/packages/interproscan/package.py
index 5639f6caf1..b9d764f160 100644
--- a/var/spack/repos/builtin/packages/interproscan/package.py
+++ b/var/spack/repos/builtin/packages/interproscan/package.py
@@ -39,6 +39,7 @@ class Interproscan(Package):
depends_on('perl-xml-parser', when='@:4.8', type=('build', 'run'))
depends_on('perl-io-string', when='@:4.8', type=('build', 'run'))
depends_on('perl-io-stringy', when='@:4.8', type=('build', 'run'))
+ depends_on('perl-db-file', when='@:4.8', type=('build', 'run'))
patch('large-gid.patch', when='@5:')
patch('non-interactive.patch', when='@:4.8')
diff --git a/var/spack/repos/builtin/packages/ip/package.py b/var/spack/repos/builtin/packages/ip/package.py
index 52fdecf927..1fccdce7a7 100644
--- a/var/spack/repos/builtin/packages/ip/package.py
+++ b/var/spack/repos/builtin/packages/ip/package.py
@@ -19,3 +19,10 @@ class Ip(CMakePackage):
version('3.3.3', sha256='d5a569ca7c8225a3ade64ef5cd68f3319bcd11f6f86eb3dba901d93842eb3633')
depends_on('sp')
+
+ def setup_run_environment(self, env):
+ for suffix in ('4', '8', 'd'):
+            lib = find_libraries('libip_' + suffix, root=self.prefix,
+ shared=False, recursive=True)
+ env.set('IP_LIB' + suffix, lib[0])
+ env.set('IP_INC' + suffix, join_path(self.prefix, 'include_' + suffix))
diff --git a/var/spack/repos/builtin/packages/ip2/package.py b/var/spack/repos/builtin/packages/ip2/package.py
index aac5c0b538..ef7577dc1f 100644
--- a/var/spack/repos/builtin/packages/ip2/package.py
+++ b/var/spack/repos/builtin/packages/ip2/package.py
@@ -22,3 +22,10 @@ class Ip2(CMakePackage):
version('1.1.2', sha256='73c6beec8fd463ec7ccba3633d8c5d53d385c43d507367efde918c2db0af42ab')
depends_on('sp')
+
+ def setup_run_environment(self, env):
+ for suffix in ('4', '8', 'd'):
+ lib = find_libraries('libip2_' + suffix, root=self.prefix,
+ shared=False, recursive=True)
+ env.set('IP2_LIB' + suffix, lib[0])
+ env.set('IP2_INC' + suffix, join_path(self.prefix, 'include_' + suffix))
diff --git a/var/spack/repos/builtin/packages/isaac-server/arm.patch b/var/spack/repos/builtin/packages/isaac-server/arm.patch
index 895fa3d57f..895fa3d57f 100755..100644
--- a/var/spack/repos/builtin/packages/isaac-server/arm.patch
+++ b/var/spack/repos/builtin/packages/isaac-server/arm.patch
diff --git a/var/spack/repos/builtin/packages/isescan/package.py b/var/spack/repos/builtin/packages/isescan/package.py
index 44394d284e..96531d9804 100644
--- a/var/spack/repos/builtin/packages/isescan/package.py
+++ b/var/spack/repos/builtin/packages/isescan/package.py
@@ -22,7 +22,7 @@ class Isescan(Package):
depends_on('py-fastcluster', type='run')
depends_on('py-argparse', type='run')
depends_on('blast-plus@2.2.31:', type='run')
- depends_on('fraggenescan@1.30:', type='run')
+ depends_on('fraggenescan@:1.30', type='run')
depends_on('hmmer@3.1b2:', type='run')
def setup_run_environment(self, env):
diff --git a/var/spack/repos/builtin/packages/jq/builtinc.patch b/var/spack/repos/builtin/packages/jq/builtinc.patch
new file mode 100644
index 0000000000..943edbc9d2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/jq/builtinc.patch
@@ -0,0 +1,48 @@
+diff --git a/src/builtin.c b/src/builtin.c
+old mode 100644
+new mode 100755
+index c6c8c2e..e336472
+--- a/src/builtin.c
++++ b/src/builtin.c
+@@ -185,7 +185,7 @@ static jv f_modf(jq_state *jq, jv input) {
+ return jv_array_append(ret, jv_number(i));
+ }
+ #endif
+-#ifdef HAVE_LGAMMA_R
++#if defined(HAVE_LGAMMA_R) && !(defined(__APPLE__) && defined(__GNUC__) && !defined(_REENTRANT))
+ static jv f_lgamma_r(jq_state *jq, jv input) {
+ if (jv_get_kind(input) != JV_KIND_NUMBER) {
+ return type_error(input, "number required");
+@@ -1581,7 +1581,7 @@ static const struct cfunction function_list[] = {
+ #ifdef HAVE_MODF
+ {(cfunction_ptr)f_modf,"modf", 1},
+ #endif
+-#ifdef HAVE_LGAMMA_R
++#if defined(HAVE_LGAMMA_R) && !(defined(__APPLE__) && defined(__GNUC__) && !defined(_REENTRANT))
+ {(cfunction_ptr)f_lgamma_r,"lgamma_r", 1},
+ #endif
+ {(cfunction_ptr)f_plus, "_plus", 3},
+diff --git a/src/builtin.c b/src/builtin.c
+old mode 100644
+new mode 100755
+index c6c8c2e..e336472
+--- a/src/builtin.c
++++ b/src/builtin.c
+@@ -185,7 +185,7 @@ static jv f_modf(jq_state *jq, jv input) {
+ return jv_array_append(ret, jv_number(i));
+ }
+ #endif
+-#ifdef HAVE_LGAMMA_R
++#if defined(HAVE_LGAMMA_R) && !(defined(__APPLE__) && defined(__GNUC__) && !defined(_REENTRANT))
+ static jv f_lgamma_r(jq_state *jq, jv input) {
+ if (jv_get_kind(input) != JV_KIND_NUMBER) {
+ return type_error(input, "number required");
+@@ -1581,7 +1581,7 @@ static const struct cfunction function_list[] = {
+ #ifdef HAVE_MODF
+ {(cfunction_ptr)f_modf,"modf", 1},
+ #endif
+-#ifdef HAVE_LGAMMA_R
++#if defined(HAVE_LGAMMA_R) && !(defined(__APPLE__) && defined(__GNUC__) && !defined(_REENTRANT))
+ {(cfunction_ptr)f_lgamma_r,"lgamma_r", 1},
+ #endif
+ {(cfunction_ptr)f_plus, "_plus", 3},
diff --git a/var/spack/repos/builtin/packages/jq/package.py b/var/spack/repos/builtin/packages/jq/package.py
index 613ed97c20..88061fe266 100644
--- a/var/spack/repos/builtin/packages/jq/package.py
+++ b/var/spack/repos/builtin/packages/jq/package.py
@@ -4,8 +4,10 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
+import sys
from spack import *
+from spack.operating_systems.mac_os import macos_version
class Jq(AutotoolsPackage):
@@ -20,6 +22,9 @@ class Jq(AutotoolsPackage):
depends_on('oniguruma')
depends_on('bison@3.0:', type='build')
+ if sys.platform == 'darwin' and macos_version() >= Version('10.15'):
+ patch('builtinc.patch', when='@1.5:')
+
@run_after('install')
@on_package_attributes(run_tests=True)
def install_test(self):
diff --git a/var/spack/repos/builtin/packages/julia/armgcc.patch b/var/spack/repos/builtin/packages/julia/armgcc.patch
index 50719face8..50719face8 100755..100644
--- a/var/spack/repos/builtin/packages/julia/armgcc.patch
+++ b/var/spack/repos/builtin/packages/julia/armgcc.patch
diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py
index 7267eb439e..c0e7853892 100644
--- a/var/spack/repos/builtin/packages/julia/package.py
+++ b/var/spack/repos/builtin/packages/julia/package.py
@@ -19,8 +19,9 @@ class Julia(Package):
maintainers = ['glennpj', 'vchuravy']
version('master', branch='master')
- version('1.7.0-rc2', sha256='178f5531bdbd379bd376f3cccf9a6a1e1215a678b70c93bdc41edb7239c76dc2')
- version('1.6.3', sha256='29aad934582fb4c6dd9f9dd558ad649921f43bc7320eab54407fdf6dd3270a33', preferred=True)
+ version('1.7.0', sha256='d40d83944f8e1709de1d6f7544e1a6721e091f70ba06b44c25b89bdba754dfa6', preferred=True)
+ version('1.6.4', sha256='954578b973fdb891c88fa1eedd931129e215ab928ecc416dd0bdf6c70549d2fc')
+ version('1.6.3', sha256='29aad934582fb4c6dd9f9dd558ad649921f43bc7320eab54407fdf6dd3270a33')
version('1.6.2', sha256='01241120515cb9435b96179cf301fbd2c24d4405f252588108d13ceac0f41c0a')
version('1.6.1', sha256='71d8e40611361370654e8934c407b2dec04944cf3917c5ecb6482d6b85ed767f')
version('1.6.0', sha256='1b05f42c9368bc2349c47363b7ddc175a2da3cd162d52b6e24c4f5d4d6e1232c')
@@ -176,6 +177,12 @@ class Julia(Package):
target_str = "znver1"
if target_str == "zen2":
target_str = "znver2"
+ if target_str == "zen3":
+ if spec.satisfies('@1.7.0:'):
+ target_str = "znver3"
+ else:
+ # The LLVM in @1.6.4 doesn't support znver3.
+ target_str = "znver2"
options += [
'JULIA_CPU_TARGET={0}'.format(target_str)
]
diff --git a/var/spack/repos/builtin/packages/kokkos-kernels/package.py b/var/spack/repos/builtin/packages/kokkos-kernels/package.py
index c41a1607aa..8b046dd89f 100644
--- a/var/spack/repos/builtin/packages/kokkos-kernels/package.py
+++ b/var/spack/repos/builtin/packages/kokkos-kernels/package.py
@@ -102,8 +102,11 @@ class KokkosKernels(CMakePackage, CudaPackage):
options.append("-DSpack_WORKAROUND=On")
options.append("-DKokkos_ROOT=%s" % spec["kokkos"].prefix)
- # Compiler weirdness due to nvcc_wrapper
- options.append("-DCMAKE_CXX_COMPILER=%s" % spec["kokkos"].kokkos_cxx)
+ if spec.satisfies('^kokkos+rocm'):
+ options.append("-DCMAKE_CXX_COMPILER=%s" % spec['hip'].hipcc)
+ else:
+ # Compiler weirdness due to nvcc_wrapper
+ options.append("-DCMAKE_CXX_COMPILER=%s" % spec["kokkos"].kokkos_cxx)
if self.run_tests:
options.append("-DKokkosKernels_ENABLE_TESTS=ON")
diff --git a/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py b/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py
index bdba998849..c26522bea7 100644
--- a/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py
+++ b/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py
@@ -42,7 +42,7 @@ class KokkosNvccWrapper(Package):
env.set('KOKKOS_CXX', self.compiler.cxx)
env.set('MPICH_CXX', wrapper)
env.set('OMPI_CXX', wrapper)
- env.set('MPICXX_CXX', wrapper) # HPE MPT
+ env.set('MPICXX_CXX', wrapper) # HPE MPT
def setup_dependent_package(self, module, dependent_spec):
wrapper = join_path(self.prefix.bin, "nvcc_wrapper")
diff --git a/var/spack/repos/builtin/packages/landsfcutil/package.py b/var/spack/repos/builtin/packages/landsfcutil/package.py
index b43879c71c..7a2f77edc2 100644
--- a/var/spack/repos/builtin/packages/landsfcutil/package.py
+++ b/var/spack/repos/builtin/packages/landsfcutil/package.py
@@ -18,3 +18,12 @@ class Landsfcutil(CMakePackage):
maintainers = ['edwardhartnett', 'kgerheiser', 'Hang-Lei-NOAA']
version('2.4.1', sha256='831c5005a480eabe9a8542b4deec838c2650f6966863ea2711cc0cc5db51ca14')
+
+ def setup_run_environment(self, env):
+ for suffix in ('4', 'd'):
+ lib = find_libraries('liblandsfcutil_' + suffix, root=self.prefix,
+ shared=False, recursive=True)
+
+ env.set('LANDSFCUTIL_LIB' + suffix, lib[0])
+ env.set('LANDSFCUTIL_INC' + suffix,
+ join_path(self.prefix, 'include_' + suffix))
diff --git a/var/spack/repos/builtin/packages/lanl-cmake-modules/package.py b/var/spack/repos/builtin/packages/lanl-cmake-modules/package.py
new file mode 100644
index 0000000000..7e0ecdf583
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lanl-cmake-modules/package.py
@@ -0,0 +1,17 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class LanlCmakeModules(CMakePackage):
+ '''CMake modules for projects that have not yet adopted modern CMake.
+ '''
+
+ maintainers = ['tuxfan']
+ homepage = 'https://lanl.github.io/cmake-modules'
+ git = 'https://github.com/lanl/cmake-modules.git'
+
+ version('develop', branch='develop')
diff --git a/var/spack/repos/builtin/packages/lcio/package.py b/var/spack/repos/builtin/packages/lcio/package.py
index 8410506aa9..017a6a1314 100644
--- a/var/spack/repos/builtin/packages/lcio/package.py
+++ b/var/spack/repos/builtin/packages/lcio/package.py
@@ -19,6 +19,7 @@ class Lcio(CMakePackage):
maintainers = ['gaede', 'vvolkl']
version('master', branch='master')
+ version('2.17', sha256='a81e07790443f0e2d9abb18bc3b5f2929edbc8d8e4f307f931679eaa39bb044a')
version('2.16.1', sha256='992a649f864785e62fe12d7a638b2696c91f9535881de33f22b3cceabcdcdbaf')
version('2.16', sha256='aff7707750d821f31cbae3d7529fd8e22457f48d759e834ec01aa9389b5dbf1a')
version('2.15.4', sha256='720c8130762d445df44d2c245da01c0a1ca807d7ed62362cebf7b3a99f9a37d7')
diff --git a/var/spack/repos/builtin/packages/legion/package.py b/var/spack/repos/builtin/packages/legion/package.py
index 85373b53fb..fe13d19736 100644
--- a/var/spack/repos/builtin/packages/legion/package.py
+++ b/var/spack/repos/builtin/packages/legion/package.py
@@ -198,9 +198,6 @@ class Legion(CMakePackage):
variant('max_fields', values=int, default=512,
description="Maximum number of fields allowed in a logical region.")
- variant('native', default=False,
- description="Enable native/host processor optimizaton target.")
-
def cmake_args(self):
spec = self.spec
cmake_cxx_flags = []
@@ -339,10 +336,9 @@ class Legion(CMakePackage):
maxfields = maxfields << 1
options.append('-DLegion_MAX_FIELDS=%d' % maxfields)
- if '+native' in spec:
- # default is off.
- options.append('-DBUILD_MARCH:STRING=native')
-
+ # This disables Legion's CMake build system's logic for targeting the native
+ # CPU architecture in favor of Spack-provided compiler flags
+ options.append('-DBUILD_MARCH:STRING=')
return options
@run_after('install')
diff --git a/var/spack/repos/builtin/packages/libbeagle/package.py b/var/spack/repos/builtin/packages/libbeagle/package.py
index 50b0fd4f34..0e6dc470d2 100644
--- a/var/spack/repos/builtin/packages/libbeagle/package.py
+++ b/var/spack/repos/builtin/packages/libbeagle/package.py
@@ -25,8 +25,10 @@ class Libbeagle(AutotoolsPackage, CudaPackage):
depends_on('subversion', type='build')
depends_on('pkgconfig', type='build')
depends_on('java', type='build')
+ depends_on('opencl', when='+opencl')
cuda_arch_values = CudaPackage.cuda_arch_values
+ variant('opencl', default=False, description='Include OpenCL (GPU) support')
variant(
'cuda_arch',
description='CUDA architecture',
@@ -54,6 +56,9 @@ class Libbeagle(AutotoolsPackage, CudaPackage):
filter_file('-L$with_cuda/lib', '-L$with_cuda/lib64/stubs',
'configure.ac', string=True)
+ def autoreconf(self, spec, prefix):
+ which('bash')('autogen.sh')
+
def configure_args(self):
args = [
# Since spack will inject architecture flags turn off -march=native
@@ -62,8 +67,13 @@ class Libbeagle(AutotoolsPackage, CudaPackage):
]
if '+cuda' in self.spec:
- args.append('--with-cuda=%s' % self.spec['cuda'].prefix)
+ args.append('--with-cuda={0}'.format(self.spec['cuda'].prefix))
else:
args.append('--without-cuda')
+ if '+opencl' in self.spec:
+ args.append('--with-opencl={0}'.format(self.spec['opencl'].prefix))
+ else:
+ args.append('--without-opencl')
+
return args
diff --git a/var/spack/repos/builtin/packages/libbeato/package.py b/var/spack/repos/builtin/packages/libbeato/package.py
index eaae48cf6c..7465167f88 100644
--- a/var/spack/repos/builtin/packages/libbeato/package.py
+++ b/var/spack/repos/builtin/packages/libbeato/package.py
@@ -14,4 +14,4 @@ class Libbeato(AutotoolsPackage):
homepage = "https://github.com/CRG-Barcelona/libbeato"
git = "https://github.com/CRG-Barcelona/libbeato.git"
- version('master', brancch='master')
+ version('master', branch='master')
diff --git a/var/spack/repos/builtin/packages/libblastrampoline/package.py b/var/spack/repos/builtin/packages/libblastrampoline/package.py
index 6257e5a98f..8c0148d2ce 100644
--- a/var/spack/repos/builtin/packages/libblastrampoline/package.py
+++ b/var/spack/repos/builtin/packages/libblastrampoline/package.py
@@ -9,16 +9,19 @@ from spack import *
class Libblastrampoline(MakefilePackage):
"""Using PLT trampolines to provide a BLAS and LAPACK demuxing library."""
- homepage = "https://github.com/staticfloat/libblastrampoline"
- git = "https://github.com/staticfloat/libblastrampoline.git"
-
- version('3.1.0', commit='c6c7bc5d4ae088bd7c519d58e3fb8b686d00db0c')
- version('3.0.4', commit='23de7a09bf354fe6f655c457bab5bf47fdd2486d')
- version('3.0.3', commit='7b502b7bb5d4663df4a928d0f605924cd1a35c1a')
- version('3.0.2', commit='5882fdf6395afb1ed01a8a10db94b7b3cbd39e16')
- version('3.0.1', commit='e132e645db28bec024be9410467a6c7a2d0937ae')
- version('3.0.0', commit='7bb259a69e5bad0adb55171b2bee164a30ce2e91')
- version('2.2.0', commit='45f4a20ffdba5d368db66d71885312f5f73c2dc7')
+ homepage = "https://github.com/JuliaLinearAlgebra/libblastrampoline"
+ git = "https://github.com/JuliaLinearAlgebra/libblastrampoline.git"
+ url = "https://github.com/JuliaLinearAlgebra/libblastrampoline/archive/refs/tags/v3.1.0.tar.gz"
+
+ maintainers = ['haampie']
+
+ version('3.1.0', sha256='f6136cc2b5d090ceca67cffa55b4c8af4bcee874333d49297c867abdb0749b5f')
+ version('3.0.4', sha256='3c8a54a3bd8a2737b7f74ebeb56df8e2a48083c9094dbbff80b225c228e31793')
+ version('3.0.3', sha256='a9c553ee6f20fa2f92098edcb3fc4a331c653250e559f72b9317b4ee84500cd7')
+ version('3.0.2', sha256='caefd708cf0cf53b01cea74a09ab763bf4dfa4aec4468892720f3921521c1f74')
+ version('3.0.1', sha256='b5b8ac0d3aba1bcb9dc26d7d6bb36b352d45e7d7e2594c6122e72b9e5d75a772')
+ version('3.0.0', sha256='4d0856d30e7ba0cb0de08b08b60fd34879ce98714341124acf87e587d1bbbcde')
+ version('2.2.0', sha256='1fb8752891578b45e187019c67fccbaafb108756aadc69bdd876033846ad30d3')
build_directory = 'src'
diff --git a/var/spack/repos/builtin/packages/libcroco/package.py b/var/spack/repos/builtin/packages/libcroco/package.py
index 788ae665e2..586bbce4b2 100644
--- a/var/spack/repos/builtin/packages/libcroco/package.py
+++ b/var/spack/repos/builtin/packages/libcroco/package.py
@@ -15,12 +15,18 @@ class Libcroco(AutotoolsPackage):
version('0.6.13', sha256='767ec234ae7aa684695b3a735548224888132e063f92db585759b422570621d4')
version('0.6.12', sha256='ddc4b5546c9fb4280a5017e2707fbd4839034ed1aba5b7d4372212f34f84f860')
+ variant('doc', default=False, description='Build documentation with gtk-doc')
+
depends_on('glib')
depends_on('libxml2')
- depends_on('gtk-doc', type='build')
+ depends_on('gtk-doc', type='build', when='+doc')
depends_on('pkgconfig', type='build')
def configure_args(self):
+ args = [
+ '--enable-gtk-doc=' + ('yes' if self.spec.variants['doc'].value else 'no')
+ ]
# macOS ld does not support this flag
# https://github.com/Homebrew/homebrew-core/blob/HEAD/Formula/libcroco.rb
- return ['--disable-Bsymbolic']
+ args.append('--disable-Bsymbolic')
+ return args
diff --git a/var/spack/repos/builtin/packages/libctl/package.py b/var/spack/repos/builtin/packages/libctl/package.py
index 24c4183cdd..99422f801d 100644
--- a/var/spack/repos/builtin/packages/libctl/package.py
+++ b/var/spack/repos/builtin/packages/libctl/package.py
@@ -11,10 +11,12 @@ class Libctl(AutotoolsPackage):
control files for scientific simulations."""
homepage = "http://ab-initio.mit.edu/wiki/index.php/Libctl"
- url = "http://ab-initio.mit.edu/libctl/libctl-3.2.2.tar.gz"
- list_url = "http://ab-initio.mit.edu/libctl/old"
+ git = "https://github.com/NanoComp/libctl.git"
+ url = "https://github.com/NanoComp/libctl/releases/download/v4.2.0/libctl-4.2.0.tar.gz"
- version('3.2.2', sha256='8abd8b58bc60e84e16d25b56f71020e0cb24d75b28bc5db86d50028197c7efbc')
+ version('4.2.0', sha256='0341ad6ea260ecda2efb3d4b679abb3d05ca6211792381979b036177a9291975')
+ version('3.2.2', sha256='8abd8b58bc60e84e16d25b56f71020e0cb24d75b28bc5db86d50028197c7efbc',
+ url='http://ab-initio.mit.edu/libctl/libctl-3.2.2.tar.gz')
depends_on('guile')
@@ -27,4 +29,5 @@ class Libctl(AutotoolsPackage):
spec['guile'].prefix.bin, 'guile')),
'GUILE_CONFIG={0}'.format(join_path(
spec['guile'].prefix.bin, 'guile-config')),
+ 'LIBS=-lm',
]
diff --git a/var/spack/repos/builtin/packages/libcxxwrap-julia/package.py b/var/spack/repos/builtin/packages/libcxxwrap-julia/package.py
index ee4caebe13..324c2d65c5 100644
--- a/var/spack/repos/builtin/packages/libcxxwrap-julia/package.py
+++ b/var/spack/repos/builtin/packages/libcxxwrap-julia/package.py
@@ -16,7 +16,9 @@ regular CMake library for use in other C++ projects."""
maintainers = ['eloop']
- version('master', branch='master')
+ # note: use the @main branch version if you're building for julia 1.7
+ version('main', branch='main')
+
version('0.8.3', sha256='b0421d11bdee5ce8af4922de6dfe3b0e5d69b07bb52894e3a22a477bbd27ee9e')
version('0.8.2', sha256='f8b171def3d61904ba8f9a9052a405c25afbfb9a3c5af3dd30bc36a0184ed539')
diff --git a/var/spack/repos/builtin/packages/libdrm/package.py b/var/spack/repos/builtin/packages/libdrm/package.py
index 2d7a4133fb..1516913de9 100644
--- a/var/spack/repos/builtin/packages/libdrm/package.py
+++ b/var/spack/repos/builtin/packages/libdrm/package.py
@@ -21,6 +21,8 @@ class Libdrm(AutotoolsPackage):
version('2.4.33', sha256='bd2a8fecf28616f2157ca33ede691c139cc294ed2d0c4244b62ca7d22e98e5a4')
depends_on('pkgconfig', type='build')
+ depends_on('docbook-xml', type='build')
+ depends_on('docbook-xsl', type='build')
depends_on('libpciaccess@0.10:')
depends_on('libpthread-stubs')
@@ -31,6 +33,6 @@ class Libdrm(AutotoolsPackage):
# Needed to fix build for spack/spack#1740, but breaks newer
# builds/compilers
args.append('LIBS=-lrt')
- if self.spec.satisfies('%gcc@10.0.0:'):
+ if self.spec.satisfies('%gcc@10.0.0:') or self.spec.satisfies('%clang@12.0.0:'):
args.append('CFLAGS=-fcommon')
return args
diff --git a/var/spack/repos/builtin/packages/libfabric/package.py b/var/spack/repos/builtin/packages/libfabric/package.py
index efd0aca2c6..60c129e5c2 100644
--- a/var/spack/repos/builtin/packages/libfabric/package.py
+++ b/var/spack/repos/builtin/packages/libfabric/package.py
@@ -17,6 +17,7 @@ class Libfabric(AutotoolsPackage):
maintainers = ['rajachan']
version('master', branch='master')
+ version('1.14.0', sha256='fc261388848f3cff555bd653f5cb901f6b9485ad285e5c53328b13f0e69f749a')
version('1.13.2', sha256='25d783b0722a8df8fe61c1de75fafca684c5fe520303180f26f0ad6409cfc0b9')
version('1.13.1', sha256='8e6eed38c4a39aa4cbf7d5d3734f0eecbfc030182f1f9b3be470702f2586d30e')
version('1.12.1', sha256='db3c8e0a495e6e9da6a7436adab905468aedfbd4579ee3da5232a5c111ba642c')
diff --git a/var/spack/repos/builtin/packages/libfuse/package.py b/var/spack/repos/builtin/packages/libfuse/package.py
index 90577dba96..e11a5f8e6a 100644
--- a/var/spack/repos/builtin/packages/libfuse/package.py
+++ b/var/spack/repos/builtin/packages/libfuse/package.py
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
import re
from spack import *
@@ -37,6 +38,11 @@ class Libfuse(MesonPackage):
"which typically sets up udev rules and "
"and init script in /etc/init.d"))
+ depends_on('autoconf', type='build', when='@:2')
+ depends_on('automake', type='build', when='@:2')
+ depends_on('libtool', type='build', when='@:2')
+ depends_on('gettext', type='build', when='@:2')
+
provides('fuse')
conflicts("+useroot", when='~system_install', msg="useroot requires system_install")
conflicts('platform=darwin', msg='libfuse does not support OS-X, use macfuse instead')
@@ -45,6 +51,11 @@ class Libfuse(MesonPackage):
patch('0001-Do-not-run-install-script.patch', when='@3: ~system_install')
patch('https://src.fedoraproject.org/rpms/fuse3/raw/0519b7bf17c4dd1b31ee704d49f8ed94aa5ba6ab/f/fuse3-gcc11.patch', sha256='3ad6719d2393b46615b5787e71778917a7a6aaa189ba3c3e0fc16d110a8414ec', when='@3: %gcc@11:')
+ patch('https://github.com/libfuse/libfuse/commit/6d55007027dfe7b75a74899f497f075046cc5404.patch', sha256='d8c54ae932f2e7179dd05081a4a8e7aefd23553a0ef354fa25b1456386d96932', when='@:2')
+ patch('https://github.com/libfuse/libfuse/commit/5d38afc8a5b4a2a6e27aad7a1840046e99cd826d.patch', sha256='7f3e8e54966aca8cb64096bea2cbd4d2679b47f9c1355fe9d442ba8464d74372', when='@:2')
+ # https://bugs.gentoo.org/803923
+ patch('https://github.com/libfuse/libfuse/commit/5a43d0f724c56f8836f3f92411e0de1b5f82db32.patch', sha256='1e8b0a1b2bbaa335d92a3c46e31c928dcd53abe011214a0cbbfa7c11a3a68f1a', when='@:2')
+
executables = ['^fusermount3?$']
@classmethod
@@ -73,6 +84,12 @@ class Libfuse(MesonPackage):
# Before libfuse 3.x this was an autotools package
@when('@:2')
def meson(self, spec, prefix):
+ ar_args = ['-ivf']
+ for dep in self.spec.dependencies(deptype='build'):
+ if os.path.exists(dep.prefix.share.aclocal):
+ ar_args.extend(['-I', dep.prefix.share.aclocal])
+ autoreconf(*ar_args)
+
args = [
"--prefix={0}".format(prefix),
"MOUNT_FUSE_PATH={0}".format(self.prefix.sbin),
diff --git a/var/spack/repos/builtin/packages/libint/package.py b/var/spack/repos/builtin/packages/libint/package.py
index 8f6ecfec17..d44eaa44fe 100644
--- a/var/spack/repos/builtin/packages/libint/package.py
+++ b/var/spack/repos/builtin/packages/libint/package.py
@@ -74,9 +74,7 @@ class Libint(AutotoolsPackage):
return "{0}/v{1}.tar.gz".format(base_url, version)
def autoreconf(self, spec, prefix):
- libtoolize()
- aclocal('-I', 'lib/autoconf')
- autoconf()
+ which('bash')('autogen.sh')
if '@2.6.0:' in spec:
# skip tarball creation and removal of dir with generated code
@@ -104,7 +102,10 @@ class Libint(AutotoolsPackage):
def configure_args(self):
- config_args = ['--enable-shared']
+ config_args = [
+ '--enable-shared',
+ '--with-boost={0}'.format(self.spec['boost'].prefix)
+ ]
# Optimization flag names have changed in libint 2
if self.version < Version('2.0.0'):
@@ -185,16 +186,18 @@ class Libint(AutotoolsPackage):
packages (CP2K notably).
"""
- super(Libint, self).build(spec, prefix)
-
# upstream says that using configure/make for the generated code
# is deprecated and one should use CMake, but with the currently
# recent 2.7.0.b1 it still doesn't work
+ # first generate the libint compiler
+ make('export')
+ # now build the library
with working_dir(os.path.join(self.build_directory, 'generated')):
# straight from the AutotoolsPackage class:
config_args = [
'--prefix={0}'.format(prefix),
'--enable-shared',
+ '--with-boost={0}'.format(self.spec['boost'].prefix),
'--with-cxx-optflags={0}'.format(self.optflags),
]
config_args += self.enable_or_disable(
diff --git a/var/spack/repos/builtin/packages/liblzf/package.py b/var/spack/repos/builtin/packages/liblzf/package.py
new file mode 100644
index 0000000000..35c0bcc2ad
--- /dev/null
+++ b/var/spack/repos/builtin/packages/liblzf/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Liblzf(AutotoolsPackage):
+ """LibLZF is a very small data compression library.
+
+ It consists of only two .c and two .h files and is very easy to incorporate into
+ your own programs. The compression algorithm is very, very fast, yet still written
+ in portable C."""
+
+ homepage = "http://oldhome.schmorp.de/marc/liblzf.html"
+ url = "http://dist.schmorp.de/liblzf/liblzf-3.6.tar.gz"
+
+ version('3.6', sha256='9c5de01f7b9ccae40c3f619d26a7abec9986c06c36d260c179cedd04b89fb46a')
diff --git a/var/spack/repos/builtin/packages/libmonitor/package.py b/var/spack/repos/builtin/packages/libmonitor/package.py
index bdfeef48b7..fab4269455 100644
--- a/var/spack/repos/builtin/packages/libmonitor/package.py
+++ b/var/spack/repos/builtin/packages/libmonitor/package.py
@@ -16,6 +16,7 @@ class Libmonitor(AutotoolsPackage):
maintainers = ['mwkrentel']
version('master', branch='master')
+ version('2021.11.08', commit='22aa52c621534f12d401fa37f6963bfca7441e20')
version('2021.04.27', commit='a2d1b6be23410ef1ad2c9d0006672453803243c2')
version('2020.10.15', commit='36e5cb7ebeadfff01476b79ff04f6ec772ba831d')
version('2019.05.31', commit='c9767087d52e58a719aa7f149136b101e499db44')
diff --git a/var/spack/repos/builtin/packages/librsvg/package.py b/var/spack/repos/builtin/packages/librsvg/package.py
index 7351e56cb3..39d5bb871e 100644
--- a/var/spack/repos/builtin/packages/librsvg/package.py
+++ b/var/spack/repos/builtin/packages/librsvg/package.py
@@ -17,10 +17,12 @@ class Librsvg(AutotoolsPackage):
version('2.50.0', sha256='b3fadba240f09b9c9898ab20cb7311467243e607cf8f928b7c5f842474ee3df4')
version('2.44.14', sha256='6a85a7868639cdd4aa064245cc8e9d864dad8b8e9a4a8031bb09a4796bc4e303')
+ variant('doc', default=False, description='Build documentation with gtk-doc')
+
depends_on("gobject-introspection", type='build')
depends_on("pkgconfig", type='build')
depends_on("rust", type='build')
- depends_on('gtk-doc', type='build')
+ depends_on('gtk-doc', type='build', when='+doc')
depends_on("cairo+gobject")
depends_on("gdk-pixbuf")
depends_on("glib")
@@ -48,3 +50,8 @@ class Librsvg(AutotoolsPackage):
def setup_run_environment(self, env):
env.prepend_path('XDG_DATA_DIRS', self.prefix.share)
+
+ def configure_args(self):
+ return [
+ '--enable-gtk-doc=' + ('yes' if self.spec.variants['doc'].value else 'no')
+ ]
diff --git a/var/spack/repos/builtin/packages/libseccomp/package.py b/var/spack/repos/builtin/packages/libseccomp/package.py
index 6362cc9cce..b8d66d092c 100644
--- a/var/spack/repos/builtin/packages/libseccomp/package.py
+++ b/var/spack/repos/builtin/packages/libseccomp/package.py
@@ -10,16 +10,14 @@ class Libseccomp(AutotoolsPackage):
"""The main libseccomp repository"""
homepage = "https://github.com/seccomp/libseccomp"
- url = "https://github.com/seccomp/libseccomp/archive/v2.3.3.zip"
+ url = 'https://github.com/seccomp/libseccomp/releases/download/v2.5.3/libseccomp-2.5.3.tar.gz'
- version('2.3.3', sha256='627e114b3be2e66ed8d88b90037498333384d9bea822423662a44c3a8520e187')
+ version('2.5.3', sha256='59065c8733364725e9721ba48c3a99bbc52af921daf48df4b1e012fbc7b10a76')
+ version('2.3.3', sha256='7fc28f4294cc72e61c529bedf97e705c3acf9c479a8f1a3028d4cd2ca9f3b155')
variant('python', default=True, description="Build Python bindings")
- depends_on('autoconf', type='build')
- depends_on('automake', type='build')
- depends_on('libtool', type='build')
- depends_on('m4', type='build')
+ depends_on('gperf', type='build', when='@2.5:')
depends_on("py-cython", type="build", when="+python")
def configure_args(self):
diff --git a/var/spack/repos/builtin/packages/libsharp/1.0.0-arm.patch b/var/spack/repos/builtin/packages/libsharp/1.0.0-arm.patch
index 1f68c96ce9..1f68c96ce9 100755..100644
--- a/var/spack/repos/builtin/packages/libsharp/1.0.0-arm.patch
+++ b/var/spack/repos/builtin/packages/libsharp/1.0.0-arm.patch
diff --git a/var/spack/repos/builtin/packages/libsharp/arm.patch b/var/spack/repos/builtin/packages/libsharp/arm.patch
index 19ed264757..19ed264757 100755..100644
--- a/var/spack/repos/builtin/packages/libsharp/arm.patch
+++ b/var/spack/repos/builtin/packages/libsharp/arm.patch
diff --git a/var/spack/repos/builtin/packages/libslirp/package.py b/var/spack/repos/builtin/packages/libslirp/package.py
new file mode 100644
index 0000000000..95c292d76c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libslirp/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Libslirp(MesonPackage):
+ """General purpose TCP-IP emulator"""
+
+ homepage = 'https://gitlab.freedesktop.org/slirp/libslirp'
+ url = 'https://gitlab.freedesktop.org/slirp/libslirp/-/archive/v4.6.1/libslirp-v4.6.1.tar.gz'
+ maintainers = ['bernhardkaindl']
+
+ version('4.6.1', sha256='69ad4df0123742a29cc783b35de34771ed74d085482470df6313b6abeb799b11')
+
+ depends_on('pkgconfig', type='build')
+ depends_on('glib')
diff --git a/var/spack/repos/builtin/packages/libspatialite/package.py b/var/spack/repos/builtin/packages/libspatialite/package.py
index 09bef51747..b601df69b9 100644
--- a/var/spack/repos/builtin/packages/libspatialite/package.py
+++ b/var/spack/repos/builtin/packages/libspatialite/package.py
@@ -16,7 +16,8 @@ class Libspatialite(AutotoolsPackage):
url = "https://www.gaia-gis.it/gaia-sins/libspatialite-sources/libspatialite-4.3.0a.tar.gz"
manual_download = True
- version('5.0.0', preferred=True, sha256='7b7fd70243f5a0b175696d87c46dde0ace030eacc27f39241c24bac5dfac6dac')
+ version('5.0.1', sha256='eecbc94311c78012d059ebc0fae86ea5ef6eecb13303e6e82b3753c1b3409e98')
+ version('5.0.0', sha256='7b7fd70243f5a0b175696d87c46dde0ace030eacc27f39241c24bac5dfac6dac')
# Must download manually from:
# https://www.gaia-gis.it/fossil/libspatialite/info/c7f67038bf06d98d
# For instructions on the file:// below..
@@ -41,3 +42,4 @@ class Libspatialite(AutotoolsPackage):
depends_on('iconv')
depends_on('libxml2')
depends_on('minizip', when='@5.0.0:')
+ depends_on('librttopo', when='@5.0.1:')
diff --git a/var/spack/repos/builtin/packages/libssh2/package.py b/var/spack/repos/builtin/packages/libssh2/package.py
index 933a996b03..c863d796c4 100644
--- a/var/spack/repos/builtin/packages/libssh2/package.py
+++ b/var/spack/repos/builtin/packages/libssh2/package.py
@@ -12,20 +12,38 @@ class Libssh2(CMakePackage):
homepage = "https://www.libssh2.org/"
url = "https://www.libssh2.org/download/libssh2-1.7.0.tar.gz"
+ version('1.10.0', sha256='2d64e90f3ded394b91d3a2e774ca203a4179f69aebee03003e5a6fa621e41d51')
+ version('1.9.0', sha256='d5fb8bd563305fd1074dda90bd053fb2d29fc4bce048d182f96eaa466dfadafd')
version('1.8.0', sha256='39f34e2f6835f4b992cafe8625073a88e5a28ba78f83e8099610a7b3af4676d4')
version('1.7.0', sha256='e4561fd43a50539a8c2ceb37841691baf03ecb7daf043766da1b112e4280d584')
version('1.4.3', sha256='eac6f85f9df9db2e6386906a6227eb2cd7b3245739561cad7d6dc1d5d021b96d') # CentOS7
- variant('shared', default=True,
- description="Build shared libraries")
+ variant('crypto', default='openssl', values=('openssl', 'mbedtls'), multi=False)
+ variant('shared', default=True, description="Build shared libraries")
+
+ conflicts('crypto=mbedtls', when='@:1.7', msg='mbedtls only available from 1.8.0')
depends_on('cmake@2.8.11:', type='build')
- depends_on('openssl')
+ depends_on('openssl', when='crypto=openssl')
+ depends_on('openssl@:2', when='@:1.9 crypto=openssl')
+ depends_on('mbedtls@:2 +pic', when='crypto=mbedtls')
depends_on('zlib')
depends_on('xz')
def cmake_args(self):
- return [self.define_from_variant('BUILD_SHARED_LIBS', 'shared')]
+ args = [
+ self.define('BUILD_TESTING', 'OFF'),
+ self.define_from_variant('BUILD_SHARED_LIBS', 'shared')
+ ]
+
+ crypto = self.spec.variants['crypto'].value
+
+ if crypto == 'openssl':
+ args.append(self.define('CRYPTO_BACKEND', 'OpenSSL'))
+ elif crypto == 'mbedtls':
+ args.append(self.define('CRYPTO_BACKEND', 'mbedTLS'))
+
+ return args
@run_after('install')
def darwin_fix(self):
diff --git a/var/spack/repos/builtin/packages/libtree/package.py b/var/spack/repos/builtin/packages/libtree/package.py
index 5d5aaa78ab..08af134409 100644
--- a/var/spack/repos/builtin/packages/libtree/package.py
+++ b/var/spack/repos/builtin/packages/libtree/package.py
@@ -3,12 +3,11 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-from spack import *
+from spack.build_systems.cmake import CMakePackage
-class Libtree(CMakePackage):
- """ldd as a tree with an option to bundle dependencies into a
- single folder"""
+class Libtree(MakefilePackage):
+ """ldd as a tree"""
homepage = "https://github.com/haampie/libtree"
git = "https://github.com/haampie/libtree.git"
@@ -16,6 +15,8 @@ class Libtree(CMakePackage):
maintainers = ['haampie']
version('master', branch='master')
+ version('3.0.1', sha256='20d3cd66f5c74058de9dd594af8ffd639c795d27ab435c588a3cd43911c1604f')
+ version('3.0.0', sha256='6f7b069a8e5d86741e18a4c8a7e835ac530ae012dfc9509e00ffa694aa6818b1')
version('2.0.0', sha256='099e85d8ba3c3d849ce05b8ba2791dd25cd042a813be947fb321b0676ef71883')
version('1.2.3', sha256='4a912cf97109219fe931942a30579336b6ab9865395447bd157bbfa74bf4e8cf')
version('1.2.2', sha256='4ccf09227609869b85a170550b636defcf0b0674ecb0785063b81785b1c29bdd')
@@ -35,20 +36,27 @@ class Libtree(CMakePackage):
return "https://github.com/haampie/libtree/archive/refs/tags/v{0}.tar.gz".format(version)
- variant('chrpath', default=False, description='Use chrpath for deployment')
- variant('strip', default=False, description='Use binutils strip for deployment')
+ # Version 3.x (Makefile)
+ @when('@3:')
+ def install(self, spec, prefix):
+ make('install', 'PREFIX=' + prefix)
- # header only dependencies
- depends_on('cpp-termcolor', when='@2.0:', type='build')
- depends_on('cxxopts', when='@2.0:', type='build')
- depends_on('elfio', when='@2.0:', type='build')
-
- # runtime deps
- depends_on('chrpath', when='+chrpath', type='run')
- depends_on('binutils', when='+strip', type='run')
+ # Version 2.x and earlier (CMake)
+ with when('@:2'):
+ variant('chrpath', default=False, description='Use chrpath for deployment')
+ variant('strip', default=False, description='Use binutils strip for deployment')
+ variant('build_type', default='RelWithDebInfo',
+ description='CMake build type',
+ values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
+ depends_on('googletest', type='test')
+ depends_on('cmake@3:', type='build')
+ depends_on('chrpath', when='+chrpath', type='run')
+ depends_on('binutils', when='+strip', type='run')
- # testing
- depends_on('googletest', type='test')
+ # header only dependencies
+ depends_on('cpp-termcolor', when='@2.0.0:2', type='build')
+ depends_on('cxxopts', when='@2.0.0:2', type='build')
+ depends_on('elfio', when='@2.0.0:2', type='build')
def cmake_args(self):
tests_enabled = 'ON' if self.run_tests else 'OFF'
@@ -58,9 +66,17 @@ class Libtree(CMakePackage):
tests_define = 'BUILD_TESTING'
return [
- self.define(tests_define, tests_enabled)
+ CMakePackage.define(tests_define, tests_enabled)
]
+ @when('@:2')
+ def edit(self, spec, prefix):
+ options = CMakePackage._std_args(self) + self.cmake_args()
+ options.append(self.stage.source_path)
+ with working_dir(self.build_directory):
+ cmake(*options)
+
+ @when('@:2')
def check(self):
with working_dir(self.build_directory):
ctest('--output-on-failure')
diff --git a/var/spack/repos/builtin/packages/libxc/package.py b/var/spack/repos/builtin/packages/libxc/package.py
index cdfcc52433..cfbbd953e8 100644
--- a/var/spack/repos/builtin/packages/libxc/package.py
+++ b/var/spack/repos/builtin/packages/libxc/package.py
@@ -13,6 +13,7 @@ class Libxc(AutotoolsPackage, CudaPackage):
homepage = "https://tddft.org/programs/libxc/"
url = "https://www.tddft.org/programs/libxc/down.php?file=2.2.2/libxc-2.2.2.tar.gz"
+ version('5.1.7', sha256='1a818fdfe5c5f74270bc8ef0c59064e8feebcd66b8f642c08aecc1e7d125be34')
version('5.1.5', sha256='02e4615a22dc3ec87a23efbd3d9be5bfad2445337140bad1720699571c45c3f9')
version('5.1.3', sha256='0350defdd6c1b165e4cf19995f590eee6e0b9db95a6b221d28cecec40f4e85cd')
version('5.1.2', sha256='180d52b5552921d1fac8a10869dd30708c0fb41dc202a3bbee0e36f43872718a')
diff --git a/var/spack/repos/builtin/packages/libxsmm/package.py b/var/spack/repos/builtin/packages/libxsmm/package.py
index df1e6e51bf..a9c4973ff3 100644
--- a/var/spack/repos/builtin/packages/libxsmm/package.py
+++ b/var/spack/repos/builtin/packages/libxsmm/package.py
@@ -15,12 +15,13 @@ class Libxsmm(MakefilePackage):
and deep learning primitives."""
homepage = 'https://github.com/hfp/libxsmm'
- url = 'https://github.com/hfp/libxsmm/archive/1.16.3.tar.gz'
+ url = 'https://github.com/hfp/libxsmm/archive/1.17.tar.gz'
git = 'https://github.com/hfp/libxsmm.git'
maintainers = ['hfp']
version('master', branch='master')
+ version('1.17', sha256='8b642127880e92e8a75400125307724635ecdf4020ca4481e5efe7640451bb92')
version('1.16.3', sha256='e491ccadebc5cdcd1fc08b5b4509a0aba4e2c096f53d7880062a66b82a0baf84')
version('1.16.2', sha256='bdc7554b56b9e0a380fc9c7b4f4394b41be863344858bc633bc9c25835c4c64e')
version('1.16.1', sha256='93dc7a3ec40401988729ddb2c6ea2294911261f7e6cd979cf061b5c3691d729d')
diff --git a/var/spack/repos/builtin/packages/libzmq/package.py b/var/spack/repos/builtin/packages/libzmq/package.py
index ca17fad155..20b525081d 100644
--- a/var/spack/repos/builtin/packages/libzmq/package.py
+++ b/var/spack/repos/builtin/packages/libzmq/package.py
@@ -14,6 +14,7 @@ class Libzmq(AutotoolsPackage):
git = "https://github.com/zeromq/libzmq.git"
version('master', branch='master')
+ version('4.3.4', sha256='c593001a89f5a85dd2ddf564805deb860e02471171b3f204944857336295c3e5')
version('4.3.3', sha256='9d9285db37ae942ed0780c016da87060497877af45094ff9e1a1ca736e3875a2')
version('4.3.2', sha256='ebd7b5c830d6428956b67a0454a7f8cbed1de74b3b01e5c33c5378e22740f763')
version('4.3.1', sha256='bcbabe1e2c7d0eec4ed612e10b94b112dd5f06fcefa994a0c79a45d835cd21eb')
@@ -40,6 +41,8 @@ class Libzmq(AutotoolsPackage):
depends_on('automake', type='build', when='@develop')
depends_on('libtool', type='build', when='@develop')
depends_on('pkgconfig', type='build')
+ depends_on('docbook-xml', type='build')
+ depends_on('docbook-xsl', type='build')
depends_on('libbsd', type='link', when='@4.3.3: platform=linux')
depends_on('libbsd', type='link', when='@4.3.3: platform=cray')
diff --git a/var/spack/repos/builtin/packages/likwid/package.py b/var/spack/repos/builtin/packages/likwid/package.py
index 95117f67b1..c9cdff2aa2 100644
--- a/var/spack/repos/builtin/packages/likwid/package.py
+++ b/var/spack/repos/builtin/packages/likwid/package.py
@@ -22,6 +22,7 @@ class Likwid(Package):
git = "https://github.com/RRZE-HPC/likwid.git"
maintainers = ['TomTheBear']
+ version('5.2.1', sha256='1b8e668da117f24302a344596336eca2c69d2bc2f49fa228ca41ea0688f6cbc2')
version('5.2.0', sha256='aa6dccacfca59e52d8f3be187ffcf292b2a2fa1f51a81bf8912b9d48e5a257e0')
version('5.1.1', sha256='faec7c62987967232f476a6ff0ee85af686fd24b5a360126896b7f435d1f943f')
version('5.1.0', sha256='5a180702a1656c6315b861a85031ab4cb090424aec42cbbb326b849e29f55571')
diff --git a/var/spack/repos/builtin/packages/llvm-doe/package.py b/var/spack/repos/builtin/packages/llvm-doe/package.py
index c384210561..e2d1284116 100644
--- a/var/spack/repos/builtin/packages/llvm-doe/package.py
+++ b/var/spack/repos/builtin/packages/llvm-doe/package.py
@@ -2,12 +2,14 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
import os.path
import re
import sys
import llnl.util.tty as tty
+import spack.build_environment
import spack.util.executable
@@ -19,15 +21,21 @@ class LlvmDoe(CMakePackage, CudaPackage):
homepage = "https://github.com/llvm-doe-org"
url = "https://github.com/llvm-doe-org/llvm-project/archive/llvmorg-10.0.0.zip"
git = "https://github.com/llvm-doe-org/llvm-project"
- maintainers = ['shintaro-iwasaki']
+ maintainers = ['vlkale']
+
tags = ['e4s']
+ generator = 'Ninja'
+
+ family = "compiler" # Used by lmod
+
version('doe', branch='doe', preferred=True)
version('upstream', branch='llvm.org/main')
version('bolt', branch='bolt/main')
version('clacc', branch='clacc/master')
version('pragma-clang-loop', branch='sollve/pragma-clang-loop')
version('pragma-omp-tile', branch='sollve/pragma-omp-tile')
+ version('13.0.0', branch='llvm.org/llvmorg-13.0.0')
# NOTE: The debug version of LLVM is an order of magnitude larger than
# the release version, and may take up 20-30 GB of space. If you want
@@ -95,6 +103,11 @@ class LlvmDoe(CMakePackage, CudaPackage):
"components in a single shared library",
)
variant(
+ "link_llvm_dylib",
+ default=False,
+ description="Link LLVM tools against the LLVM shared library",
+ )
+ variant(
"all_targets",
default=False,
description="Build all supported targets, default targets "
@@ -112,6 +125,11 @@ class LlvmDoe(CMakePackage, CudaPackage):
description="Build with OpenMP capable thread sanitizer",
)
variant(
+ "omp_as_runtime",
+ default=True,
+ description="Build OpenMP runtime via ENABLE_RUNTIME by just-built Clang",
+ )
+ variant(
"argobots",
default=False,
description="Build BOLT/OpenMP with Argobots. Effective when @bolt",
@@ -120,16 +138,21 @@ class LlvmDoe(CMakePackage, CudaPackage):
description="Enable code-signing on macOS")
variant("python", default=False, description="Install python bindings")
+ variant('version_suffix', default='none', description="Add a symbol suffix")
+ variant('z3', default=False, description='Use Z3 for the clang static analyzer')
+
extends("python", when="+python")
# Build dependency
depends_on("cmake@3.4.3:", type="build")
+ depends_on('cmake@3.13.4:', type='build', when='@12:')
+ depends_on("ninja", type="build")
depends_on("python", when="~python", type="build")
depends_on("pkgconfig", type="build")
# Universal dependency
depends_on("python", when="+python")
- depends_on("z3")
+ depends_on("z3", when='+clang+z3')
# openmp dependencies
depends_on("perl-data-dumper", type=("build"))
@@ -146,22 +169,56 @@ class LlvmDoe(CMakePackage, CudaPackage):
depends_on("py-six", when="+lldb +python")
# gold support, required for some features
- depends_on("binutils+gold", when="+gold")
+ depends_on("binutils+gold+ld+plugins", when="+gold")
conflicts("+llvm_dylib", when="+shared_libs")
+ conflicts("+link_llvm_dylib", when="~llvm_dylib")
conflicts("+lldb", when="~clang")
conflicts("+libcxx", when="~clang")
conflicts("+internal_unwind", when="~clang")
conflicts("+compiler-rt", when="~clang")
+ conflicts("+flang", when="~clang")
- conflicts("%gcc@:5.0")
+ conflicts('~mlir', when='+flang', msg='Flang requires MLIR')
+
+ # Older LLVM do not build with newer compilers, and vice versa
+ conflicts("%gcc@8:", when="@:5")
+ conflicts("%gcc@:5.0", when="@8:")
+ # clang/lib: a lambda parameter cannot shadow an explicitly captured entity
+ conflicts("%clang@8:", when="@:4")
+
+ # When these versions are concretized, but not explicitly with +libcxx, these
+ # conflicts will enable clingo to set ~libcxx, making the build successful:
+
+ # libc++ of LLVM13, see https://libcxx.llvm.org/#platform-and-compiler-support
+ # @13 does not support %gcc@:10 https://bugs.llvm.org/show_bug.cgi?id=51359#c1
+ # GCC 11 - latest stable release per GCC release page
+ # Clang: 11, 12 - latest two stable releases per LLVM release page
+ # AppleClang 12 - latest stable release per Xcode release page
+ conflicts("%gcc@:10", when="@13:+libcxx")
+ conflicts("%clang@:10", when="@13:+libcxx")
+ conflicts("%apple-clang@:11", when="@13:+libcxx")
+
+ # libcxx-4 and compiler-rt-4 fail to build with "newer" clang and gcc versions:
+ conflicts('%gcc@7:', when='@:4+libcxx')
+ conflicts('%clang@6:', when='@:4+libcxx')
+ conflicts('%apple-clang@6:', when='@:4+libcxx')
+ conflicts('%gcc@7:', when='@:4+compiler-rt')
+ conflicts('%clang@6:', when='@:4+compiler-rt')
+ conflicts('%apple-clang@6:', when='@:4+compiler-rt')
+
+ # OMP TSAN exists in > 5.x
+ conflicts("+omp_tsan", when="@:5")
+
+ # OpenMP via ENABLE_RUNTIME restrictions
+ conflicts("+omp_as_runtime", when="~clang", msg="omp_as_runtime requires clang being built.")
+ conflicts("+omp_as_runtime", when="@:11.1", msg="omp_as_runtime works since LLVM 12.")
# cuda_arch value must be specified
conflicts("cuda_arch=none", when="+cuda", msg="A value for cuda_arch must be specified.")
- conflicts("+mlir")
-
- conflicts("+flang", when="~clang")
+ # MLIR exists in > 10.x
+ conflicts("+mlir", when="@:9")
# code signing is only necessary on macOS",
conflicts('+code_signing', when='platform=linux')
@@ -348,8 +405,25 @@ class LlvmDoe(CMakePackage, CudaPackage):
'create this identity.'
)
+ def flag_handler(self, name, flags):
+ if name == 'cxxflags':
+ flags.append(self.compiler.cxx11_flag)
+ return(None, flags, None)
+ elif name == 'ldflags' and self.spec.satisfies('%intel'):
+ flags.append('-shared-intel')
+ return(None, flags, None)
+ return(flags, None, None)
+
def setup_build_environment(self, env):
- env.append_flags("CXXFLAGS", self.compiler.cxx11_flag)
+ """When using %clang, add only its ld.lld-$ver and/or ld.lld to our PATH"""
+ if self.compiler.name in ['clang', 'apple-clang']:
+ for lld in 'ld.lld-{0}'.format(self.compiler.version.version[0]), 'ld.lld':
+ bin = os.path.join(os.path.dirname(self.compiler.cc), lld)
+ sym = os.path.join(self.stage.path, 'ld.lld')
+ if os.path.exists(bin) and not os.path.exists(sym):
+ mkdirp(self.stage.path)
+ os.symlink(bin, sym)
+ env.prepend_path('PATH', self.stage.path)
def setup_run_environment(self, env):
if "+clang" in self.spec:
@@ -363,71 +437,87 @@ class LlvmDoe(CMakePackage, CudaPackage):
def cmake_args(self):
spec = self.spec
+ define = CMakePackage.define
+ from_variant = self.define_from_variant
+
python = spec['python']
cmake_args = [
- "-DLLVM_REQUIRES_RTTI:BOOL=ON",
- "-DLLVM_ENABLE_RTTI:BOOL=ON",
- "-DLLVM_ENABLE_EH:BOOL=ON",
- "-DCLANG_DEFAULT_OPENMP_RUNTIME:STRING=libomp",
- "-DPYTHON_EXECUTABLE:PATH={0}".format(python.command.path),
- "-DLIBOMP_USE_HWLOC:BOOL=ON",
- "-DLIBOMP_HWLOC_INSTALL_DIR={0}".format(spec["hwloc"].prefix),
+ define("LLVM_REQUIRES_RTTI", True),
+ define("LLVM_ENABLE_RTTI", True),
+ define("LLVM_ENABLE_EH", True),
+ define("CLANG_DEFAULT_OPENMP_RUNTIME", "libomp"),
+ define("PYTHON_EXECUTABLE", python.command.path),
+ define("LIBOMP_USE_HWLOC", True),
+ define("LIBOMP_HWLOC_INSTALL_DIR", spec["hwloc"].prefix),
]
- if python.version >= Version("3.0.0"):
- cmake_args.append("-DPython3_EXECUTABLE={0}".format(
- python.command.path))
+ version_suffix = spec.variants['version_suffix'].value
+ if version_suffix != 'none':
+ cmake_args.append(define('LLVM_VERSION_SUFFIX', version_suffix))
+
+ if python.version >= Version("3"):
+ cmake_args.append(define("Python3_EXECUTABLE", python.command.path))
else:
- cmake_args.append("-DPython2_EXECUTABLE={0}".format(
- python.command.path))
+ cmake_args.append(define("Python2_EXECUTABLE", python.command.path))
projects = []
+ runtimes = []
if "+cuda" in spec:
- cmake_args.extend(
- [
- "-DCUDA_TOOLKIT_ROOT_DIR:PATH=" + spec["cuda"].prefix,
- "-DLIBOMPTARGET_NVPTX_COMPUTE_CAPABILITIES={0}".format(
- ",".join(spec.variants["cuda_arch"].value)
- ),
- "-DCLANG_OPENMP_NVPTX_DEFAULT_ARCH=sm_{0}".format(
- spec.variants["cuda_arch"].value[-1]
- ),
- ]
- )
+ cmake_args.extend([
+ define("CUDA_TOOLKIT_ROOT_DIR", spec["cuda"].prefix),
+ define("LIBOMPTARGET_NVPTX_COMPUTE_CAPABILITIES",
+ ",".join(spec.variants["cuda_arch"].value)),
+ define("CLANG_OPENMP_NVPTX_DEFAULT_ARCH",
+ "sm_{0}".format(spec.variants["cuda_arch"].value[-1])),
+ ])
+ if "+omp_as_runtime" in spec:
+ cmake_args.extend([
+ define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
+ # work around bad libelf detection in libomptarget
+ define("LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR",
+ spec["libelf"].prefix.include),
+ ])
else:
# still build libomptarget but disable cuda
- cmake_args.extend(
- [
- "-DCUDA_TOOLKIT_ROOT_DIR:PATH=IGNORE",
- "-DCUDA_SDK_ROOT_DIR:PATH=IGNORE",
- "-DCUDA_NVCC_EXECUTABLE:FILEPATH=IGNORE",
- "-DLIBOMPTARGET_DEP_CUDA_DRIVER_LIBRARIES:STRING=IGNORE",
- ]
- )
-
- if "+omp_debug" in spec:
- cmake_args.append("-DLIBOMPTARGET_ENABLE_DEBUG:Bool=ON")
+ cmake_args.extend([
+ define("CUDA_TOOLKIT_ROOT_DIR", "IGNORE"),
+ define("CUDA_SDK_ROOT_DIR", "IGNORE"),
+ define("CUDA_NVCC_EXECUTABLE", "IGNORE"),
+ define("LIBOMPTARGET_DEP_CUDA_DRIVER_LIBRARIES", "IGNORE"),
+ ])
- if "+python" in spec and "+lldb" in spec:
- cmake_args.append("-DLLDB_USE_SYSTEM_SIX:Bool=TRUE")
+ cmake_args.append(from_variant("LIBOMPTARGET_ENABLE_DEBUG", "omp_debug"))
- if "+lldb" in spec and spec.satisfies("@10.0.0:,doe"):
- cmake_args.append("-DLLDB_ENABLE_PYTHON:Bool={0}".format(
- 'ON' if '+python' in spec else 'OFF'))
- if "+lldb" in spec and spec.satisfies("@:9.9.9"):
- cmake_args.append("-DLLDB_DISABLE_PYTHON:Bool={0}".format(
- 'ON' if '~python' in spec else 'OFF'))
+ if "+lldb" in spec:
+ if spec.version >= Version('10'):
+ cmake_args.append(from_variant("LLDB_ENABLE_PYTHON", 'python'))
+ else:
+ cmake_args.append(define("LLDB_DISABLE_PYTHON",
+ '~python' in spec))
+ if spec.satisfies("@5.0.0: +python"):
+ cmake_args.append(define("LLDB_USE_SYSTEM_SIX", True))
if "+gold" in spec:
cmake_args.append(
- "-DLLVM_BINUTILS_INCDIR=" + spec["binutils"].prefix.include
+ define("LLVM_BINUTILS_INCDIR", spec["binutils"].prefix.include)
)
if "+clang" in spec:
projects.append("clang")
projects.append("clang-tools-extra")
- projects.append("openmp")
+ if "+omp_as_runtime" in spec:
+ runtimes.append("openmp")
+ else:
+ projects.append("openmp")
+
+ if self.spec.satisfies("@8"):
+ cmake_args.append(define('CLANG_ANALYZER_ENABLE_Z3_SOLVER',
+ self.spec.satisfies('@8+z3')))
+ if self.spec.satisfies("@9:"):
+ cmake_args.append(define('LLVM_ENABLE_Z3_SOLVER',
+ self.spec.satisfies('@9:+z3')))
+
if "+flang" in spec:
projects.append("flang")
if "+lldb" in spec:
@@ -439,53 +529,48 @@ class LlvmDoe(CMakePackage, CudaPackage):
if "+libcxx" in spec:
projects.append("libcxx")
projects.append("libcxxabi")
- cmake_args.append("-DCLANG_DEFAULT_CXX_STDLIB=libc++")
if "+mlir" in spec:
projects.append("mlir")
if "+internal_unwind" in spec:
projects.append("libunwind")
if "+polly" in spec:
projects.append("polly")
- cmake_args.append("-DLINK_POLLY_INTO_TOOLS:Bool=ON")
-
- if "+shared_libs" in spec:
- cmake_args.append("-DBUILD_SHARED_LIBS:Bool=ON")
- if "+llvm_dylib" in spec:
- cmake_args.append("-DLLVM_BUILD_LLVM_DYLIB:Bool=ON")
- if "+omp_debug" in spec:
- cmake_args.append("-DLIBOMPTARGET_ENABLE_DEBUG:Bool=ON")
-
- if "+split_dwarf" in spec:
- cmake_args.append("-DLLVM_USE_SPLIT_DWARF:Bool=ON")
+ cmake_args.append(define("LINK_POLLY_INTO_TOOLS", True))
+
+ cmake_args.extend([
+ from_variant("BUILD_SHARED_LIBS", "shared_libs"),
+ from_variant("LLVM_BUILD_LLVM_DYLIB", "llvm_dylib"),
+ from_variant("LLVM_LINK_LLVM_DYLIB", "link_llvm_dylib"),
+ from_variant("LLVM_USE_SPLIT_DWARF", "split_dwarf"),
+ # By default on Linux, libc++.so is a ldscript. CMake fails to add
+ # CMAKE_INSTALL_RPATH to it, which fails. Statically link libc++abi.a
+ # into libc++.so, linking with -lc++ or -stdlib=libc++ is enough.
+ define('LIBCXX_ENABLE_STATIC_ABI_LIBRARY', True)
+ ])
if "+all_targets" not in spec: # all is default on cmake
targets = ["NVPTX", "AMDGPU"]
- if spec.target.family == "x86" or spec.target.family == "x86_64":
+ if spec.version < Version("3.9.0"):
+ # Starting in 3.9.0 CppBackend is no longer a target (see
+ # LLVM_ALL_TARGETS in llvm's top-level CMakeLists.txt for
+ # the complete list of targets)
+ targets.append("CppBackend")
+
+ if spec.target.family in ("x86", "x86_64"):
targets.append("X86")
elif spec.target.family == "arm":
targets.append("ARM")
elif spec.target.family == "aarch64":
targets.append("AArch64")
- elif (
- spec.target.family == "sparc"
- or spec.target.family == "sparc64"
- ):
+ elif spec.target.family in ("sparc", "sparc64"):
targets.append("Sparc")
- elif (
- spec.target.family == "ppc64"
- or spec.target.family == "ppc64le"
- or spec.target.family == "ppc"
- or spec.target.family == "ppcle"
- ):
+ elif spec.target.family in ("ppc64", "ppc64le", "ppc", "ppcle"):
targets.append("PowerPC")
- cmake_args.append(
- "-DLLVM_TARGETS_TO_BUILD:STRING=" + ";".join(targets)
- )
+ cmake_args.append(define("LLVM_TARGETS_TO_BUILD", targets))
- if "+omp_tsan" in spec:
- cmake_args.append("-DLIBOMP_TSAN_SUPPORT=ON")
+ cmake_args.append(from_variant("LIBOMP_TSAN_SUPPORT", "omp_tsan"))
if spec.satisfies("@bolt"):
projects.remove("openmp")
@@ -495,64 +580,64 @@ class LlvmDoe(CMakePackage, CudaPackage):
cmake_args.append("-DLIBOMP_USE_ARGOBOTS=ON")
if self.compiler.name == "gcc":
- gcc_prefix = ancestor(self.compiler.cc, 2)
- cmake_args.append("-DGCC_INSTALL_PREFIX=" + gcc_prefix)
+ compiler = Executable(self.compiler.cc)
+ gcc_output = compiler('-print-search-dirs', output=str, error=str)
+
+ for line in gcc_output.splitlines():
+ if line.startswith("install:"):
+ # Get path and strip any whitespace
+ # (causes oddity with ancestor)
+ gcc_prefix = line.split(":")[1].strip()
+ gcc_prefix = ancestor(gcc_prefix, 4)
+ break
+ cmake_args.append(define("GCC_INSTALL_PREFIX", gcc_prefix))
- if spec.satisfies("platform=cray") or spec.satisfies("platform=linux"):
- cmake_args.append("-DCMAKE_BUILD_WITH_INSTALL_RPATH=1")
+ # if spec.satisfies("platform=cray") or spec.satisfies("platform=linux"):
+ # cmake_args.append("-DCMAKE_BUILD_WITH_INSTALL_RPATH=1")
if self.spec.satisfies("~code_signing platform=darwin"):
- cmake_args.append('-DLLDB_USE_SYSTEM_DEBUGSERVER=ON')
+ cmake_args.append(define('LLDB_USE_SYSTEM_DEBUGSERVER', True))
# Semicolon seperated list of projects to enable
- cmake_args.append(
- "-DLLVM_ENABLE_PROJECTS:STRING={0}".format(";".join(projects))
- )
+ cmake_args.append(define("LLVM_ENABLE_PROJECTS", projects))
- return cmake_args
+        # Semicolon separated list of runtimes to enable
+ if runtimes:
+ cmake_args.append(define("LLVM_ENABLE_RUNTIMES", runtimes))
- @run_before("build")
- def pre_install(self):
- with working_dir(self.build_directory):
- # When building shared libraries these need to be installed first
- make("install-LLVMTableGen")
- make("install-LLVMDemangle")
- make("install-LLVMSupport")
+ return cmake_args
@run_after("install")
def post_install(self):
spec = self.spec
+ define = CMakePackage.define
- # unnecessary if we get bootstrap builds in here
- if "+cuda" in self.spec:
+ # unnecessary if we build openmp via LLVM_ENABLE_RUNTIMES
+ if "+cuda ~omp_as_runtime" in self.spec:
ompdir = "build-bootstrapped-omp"
+ prefix_paths = spack.build_environment.get_cmake_prefix_path(self)
+ prefix_paths.append(str(spec.prefix))
# rebuild libomptarget to get bytecode runtime library files
with working_dir(ompdir, create=True):
cmake_args = [
- self.stage.source_path + "/openmp",
- "-DCMAKE_C_COMPILER:PATH={0}".format(
- spec.prefix.bin + "/clang"
- ),
- "-DCMAKE_CXX_COMPILER:PATH={0}".format(
- spec.prefix.bin + "/clang++"
- ),
- "-DCMAKE_INSTALL_PREFIX:PATH={0}".format(spec.prefix),
+ '-G', 'Ninja',
+ define('CMAKE_BUILD_TYPE', spec.variants['build_type'].value),
+ define("CMAKE_C_COMPILER", spec.prefix.bin + "/clang"),
+ define("CMAKE_CXX_COMPILER", spec.prefix.bin + "/clang++"),
+ define("CMAKE_INSTALL_PREFIX", spec.prefix),
+ define('CMAKE_PREFIX_PATH', prefix_paths)
]
cmake_args.extend(self.cmake_args())
- cmake_args.append(
- "-DLIBOMPTARGET_NVPTX_ENABLE_BCLIB:BOOL=TRUE"
- )
-
- # work around bad libelf detection in libomptarget
- cmake_args.append(
- "-DLIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR:String={0}".format(
- spec["libelf"].prefix.include
- )
- )
+ cmake_args.extend([
+ define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
+ define("LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR",
+ spec["libelf"].prefix.include),
+ self.stage.source_path + "/openmp",
+ ])
cmake(*cmake_args)
- make()
- make("install")
+ ninja()
+ ninja("install")
if "+python" in self.spec:
install_tree("llvm/bindings/python", site_packages_dir)
diff --git a/var/spack/repos/builtin/packages/llvm-openmp/package.py b/var/spack/repos/builtin/packages/llvm-openmp/package.py
index 41ae794211..41ae794211 100755..100644
--- a/var/spack/repos/builtin/packages/llvm-openmp/package.py
+++ b/var/spack/repos/builtin/packages/llvm-openmp/package.py
diff --git a/var/spack/repos/builtin/packages/llvm/llvm4-lld-ELF-Symbols.patch b/var/spack/repos/builtin/packages/llvm/llvm4-lld-ELF-Symbols.patch
new file mode 100644
index 0000000000..1a86cda358
--- /dev/null
+++ b/var/spack/repos/builtin/packages/llvm/llvm4-lld-ELF-Symbols.patch
@@ -0,0 +1,112 @@
+--- a/lldb/include/lldb/Utility/TaskPool.h
++++ b/lldb/include/lldb/Utility/TaskPool.h
+@@ -33,6 +33,7 @@
+ #include <queue>
+ #include <thread>
+ #include <vector>
++#include <functional>
+
+ // Global TaskPool class for running tasks in parallel on a set of worker thread
+ // created the first
+# Fix lld templates: https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=230463
+--- a/lld/ELF/LTO.cpp
++++ b/lld/ELF/LTO.cpp
+@@ -158,7 +158,7 @@
+ return Ret;
+ }
+
+-template void BitcodeCompiler::template add<ELF32LE>(BitcodeFile &);
+-template void BitcodeCompiler::template add<ELF32BE>(BitcodeFile &);
+-template void BitcodeCompiler::template add<ELF64LE>(BitcodeFile &);
+-template void BitcodeCompiler::template add<ELF64BE>(BitcodeFile &);
++template void BitcodeCompiler::add<ELF32LE>(BitcodeFile &);
++template void BitcodeCompiler::add<ELF32BE>(BitcodeFile &);
++template void BitcodeCompiler::add<ELF64LE>(BitcodeFile &);
++template void BitcodeCompiler::add<ELF64BE>(BitcodeFile &);
+--- a/lld/ELF/Symbols.cpp
++++ b/lld/ELF/Symbols.cpp
+@@ -343,45 +343,45 @@
+ template bool SymbolBody::hasThunk<ELF64LE>() const;
+ template bool SymbolBody::hasThunk<ELF64BE>() const;
+
+-template uint32_t SymbolBody::template getVA<ELF32LE>(uint32_t) const;
+-template uint32_t SymbolBody::template getVA<ELF32BE>(uint32_t) const;
+-template uint64_t SymbolBody::template getVA<ELF64LE>(uint64_t) const;
+-template uint64_t SymbolBody::template getVA<ELF64BE>(uint64_t) const;
+-
+-template uint32_t SymbolBody::template getGotVA<ELF32LE>() const;
+-template uint32_t SymbolBody::template getGotVA<ELF32BE>() const;
+-template uint64_t SymbolBody::template getGotVA<ELF64LE>() const;
+-template uint64_t SymbolBody::template getGotVA<ELF64BE>() const;
+-
+-template uint32_t SymbolBody::template getGotOffset<ELF32LE>() const;
+-template uint32_t SymbolBody::template getGotOffset<ELF32BE>() const;
+-template uint64_t SymbolBody::template getGotOffset<ELF64LE>() const;
+-template uint64_t SymbolBody::template getGotOffset<ELF64BE>() const;
+-
+-template uint32_t SymbolBody::template getGotPltVA<ELF32LE>() const;
+-template uint32_t SymbolBody::template getGotPltVA<ELF32BE>() const;
+-template uint64_t SymbolBody::template getGotPltVA<ELF64LE>() const;
+-template uint64_t SymbolBody::template getGotPltVA<ELF64BE>() const;
+-
+-template uint32_t SymbolBody::template getThunkVA<ELF32LE>() const;
+-template uint32_t SymbolBody::template getThunkVA<ELF32BE>() const;
+-template uint64_t SymbolBody::template getThunkVA<ELF64LE>() const;
+-template uint64_t SymbolBody::template getThunkVA<ELF64BE>() const;
+-
+-template uint32_t SymbolBody::template getGotPltOffset<ELF32LE>() const;
+-template uint32_t SymbolBody::template getGotPltOffset<ELF32BE>() const;
+-template uint64_t SymbolBody::template getGotPltOffset<ELF64LE>() const;
+-template uint64_t SymbolBody::template getGotPltOffset<ELF64BE>() const;
+-
+-template uint32_t SymbolBody::template getPltVA<ELF32LE>() const;
+-template uint32_t SymbolBody::template getPltVA<ELF32BE>() const;
+-template uint64_t SymbolBody::template getPltVA<ELF64LE>() const;
+-template uint64_t SymbolBody::template getPltVA<ELF64BE>() const;
+-
+-template uint32_t SymbolBody::template getSize<ELF32LE>() const;
+-template uint32_t SymbolBody::template getSize<ELF32BE>() const;
+-template uint64_t SymbolBody::template getSize<ELF64LE>() const;
+-template uint64_t SymbolBody::template getSize<ELF64BE>() const;
++template uint32_t SymbolBody::getVA<ELF32LE>(uint32_t) const;
++template uint32_t SymbolBody::getVA<ELF32BE>(uint32_t) const;
++template uint64_t SymbolBody::getVA<ELF64LE>(uint64_t) const;
++template uint64_t SymbolBody::getVA<ELF64BE>(uint64_t) const;
++
++template uint32_t SymbolBody::getGotVA<ELF32LE>() const;
++template uint32_t SymbolBody::getGotVA<ELF32BE>() const;
++template uint64_t SymbolBody::getGotVA<ELF64LE>() const;
++template uint64_t SymbolBody::getGotVA<ELF64BE>() const;
++
++template uint32_t SymbolBody::getGotOffset<ELF32LE>() const;
++template uint32_t SymbolBody::getGotOffset<ELF32BE>() const;
++template uint64_t SymbolBody::getGotOffset<ELF64LE>() const;
++template uint64_t SymbolBody::getGotOffset<ELF64BE>() const;
++
++template uint32_t SymbolBody::getGotPltVA<ELF32LE>() const;
++template uint32_t SymbolBody::getGotPltVA<ELF32BE>() const;
++template uint64_t SymbolBody::getGotPltVA<ELF64LE>() const;
++template uint64_t SymbolBody::getGotPltVA<ELF64BE>() const;
++
++template uint32_t SymbolBody::getThunkVA<ELF32LE>() const;
++template uint32_t SymbolBody::getThunkVA<ELF32BE>() const;
++template uint64_t SymbolBody::getThunkVA<ELF64LE>() const;
++template uint64_t SymbolBody::getThunkVA<ELF64BE>() const;
++
++template uint32_t SymbolBody::getGotPltOffset<ELF32LE>() const;
++template uint32_t SymbolBody::getGotPltOffset<ELF32BE>() const;
++template uint64_t SymbolBody::getGotPltOffset<ELF64LE>() const;
++template uint64_t SymbolBody::getGotPltOffset<ELF64BE>() const;
++
++template uint32_t SymbolBody::getPltVA<ELF32LE>() const;
++template uint32_t SymbolBody::getPltVA<ELF32BE>() const;
++template uint64_t SymbolBody::getPltVA<ELF64LE>() const;
++template uint64_t SymbolBody::getPltVA<ELF64BE>() const;
++
++template uint32_t SymbolBody::getSize<ELF32LE>() const;
++template uint32_t SymbolBody::getSize<ELF32BE>() const;
++template uint64_t SymbolBody::getSize<ELF64LE>() const;
++template uint64_t SymbolBody::getSize<ELF64BE>() const;
+
+ template class elf::Undefined<ELF32LE>;
+ template class elf::Undefined<ELF32BE>;
diff --git a/var/spack/repos/builtin/packages/llvm/llvm5-lld-ELF-Symbols.patch b/var/spack/repos/builtin/packages/llvm/llvm5-lld-ELF-Symbols.patch
new file mode 100644
index 0000000000..727647d3b8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/llvm/llvm5-lld-ELF-Symbols.patch
@@ -0,0 +1,33 @@
+# Fix lld templates: https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=230463
+--- a/lld/ELF/Symbols.cpp
++++ b/lld/ELF/Symbols.cpp
+@@ -383,17 +383,17 @@
+ return B.getName();
+ }
+
+-template uint32_t SymbolBody::template getSize<ELF32LE>() const;
+-template uint32_t SymbolBody::template getSize<ELF32BE>() const;
+-template uint64_t SymbolBody::template getSize<ELF64LE>() const;
+-template uint64_t SymbolBody::template getSize<ELF64BE>() const;
++template uint32_t SymbolBody::getSize<ELF32LE>() const;
++template uint32_t SymbolBody::getSize<ELF32BE>() const;
++template uint64_t SymbolBody::getSize<ELF64LE>() const;
++template uint64_t SymbolBody::getSize<ELF64BE>() const;
+
+-template bool DefinedRegular::template isMipsPIC<ELF32LE>() const;
+-template bool DefinedRegular::template isMipsPIC<ELF32BE>() const;
+-template bool DefinedRegular::template isMipsPIC<ELF64LE>() const;
+-template bool DefinedRegular::template isMipsPIC<ELF64BE>() const;
++template bool DefinedRegular::isMipsPIC<ELF32LE>() const;
++template bool DefinedRegular::isMipsPIC<ELF32BE>() const;
++template bool DefinedRegular::isMipsPIC<ELF64LE>() const;
++template bool DefinedRegular::isMipsPIC<ELF64BE>() const;
+
+-template uint32_t SharedSymbol::template getAlignment<ELF32LE>() const;
+-template uint32_t SharedSymbol::template getAlignment<ELF32BE>() const;
+-template uint32_t SharedSymbol::template getAlignment<ELF64LE>() const;
+-template uint32_t SharedSymbol::template getAlignment<ELF64BE>() const;
++template uint32_t SharedSymbol::getAlignment<ELF32LE>() const;
++template uint32_t SharedSymbol::getAlignment<ELF32BE>() const;
++template uint32_t SharedSymbol::getAlignment<ELF64LE>() const;
++template uint32_t SharedSymbol::getAlignment<ELF64BE>() const;
diff --git a/var/spack/repos/builtin/packages/llvm/llvm5-sanitizer-ustat.patch b/var/spack/repos/builtin/packages/llvm/llvm5-sanitizer-ustat.patch
new file mode 100644
index 0000000000..531a3c5d6d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/llvm/llvm5-sanitizer-ustat.patch
@@ -0,0 +1,25 @@
+# <sys/ustat.h> has been removed from glibc 2.28,
+# backport fix from llvm-6.0.1:
+--- a/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cc
++++ b/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cc
+@@ -159,1 +159,0 @@
+-#include <sys/ustat.h>
+@@ -252,5 +252,17 @@
+
+ #if SANITIZER_LINUX && !SANITIZER_ANDROID
+- unsigned struct_ustat_sz = sizeof(struct ustat);
++ // Use pre-computed size of struct ustat to avoid <sys/ustat.h> which
++ // has been removed from glibc 2.28.
++#if defined(__aarch64__) || defined(__s390x__) || defined (__mips64) \
++ || defined(__powerpc64__) || defined(__arch64__) || defined(__sparcv9) \
++ || defined(__x86_64__)
++#define SIZEOF_STRUCT_USTAT 32
++#elif defined(__arm__) || defined(__i386__) || defined(__mips__) \
++ || defined(__powerpc__) || defined(__s390__)
++#define SIZEOF_STRUCT_USTAT 20
++#else
++#error Unknown size of struct ustat
++#endif
++ unsigned struct_ustat_sz = SIZEOF_STRUCT_USTAT;
+ unsigned struct_rlimit64_sz = sizeof(struct rlimit64);
+ unsigned struct_statvfs64_sz = sizeof(struct statvfs64);
diff --git a/var/spack/repos/builtin/packages/llvm/missing-includes.patch b/var/spack/repos/builtin/packages/llvm/missing-includes.patch
new file mode 100644
index 0000000000..e88b8fcfde
--- /dev/null
+++ b/var/spack/repos/builtin/packages/llvm/missing-includes.patch
@@ -0,0 +1,23 @@
+# https://github.com/spack/spack/issues/24270 (This hunk is upstream since llvm-10)
+--- a/llvm/include/llvm/Demangle/MicrosoftDemangleNodes.h
++++ b/llvm/include/llvm/Demangle/MicrosoftDemangleNodes.h
+@@ -4,6 +4,8 @@
+ #include "llvm/Demangle/Compiler.h"
+ #include "llvm/Demangle/StringView.h"
+ #include <array>
++#include <cstdint>
++#include <string>
+
+ class OutputStream;
+
+# https://github.com/spack/spack/pull/27233
+--- a/llvm/utils/benchmark/src/benchmark_register.h
++++ b/llvm/utils/benchmark/src/benchmark_register.h
+@@ -2,6 +2,7 @@
+ #define BENCHMARK_REGISTER_H
+
+ #include <vector>
++#include <limits>
+
+ #include "check.h"
+
diff --git a/var/spack/repos/builtin/packages/llvm/no_cyclades.patch b/var/spack/repos/builtin/packages/llvm/no_cyclades.patch
new file mode 100644
index 0000000000..10f9d0796b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/llvm/no_cyclades.patch
@@ -0,0 +1,81 @@
+diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_common_interceptors_ioctl.inc b/compiler-rt/lib/sanitizer_common/sanitizer_common_interceptors_ioctl.inc
+--- a/compiler-rt/lib/sanitizer_common/sanitizer_common_interceptors_ioctl.inc
++++ b/compiler-rt/lib/sanitizer_common/sanitizer_common_interceptors_ioctl.inc
+@@ -370,15 +370,6 @@
+
+ #if SANITIZER_GLIBC
+ // _(SIOCDEVPLIP, WRITE, struct_ifreq_sz); // the same as EQL_ENSLAVE
+- _(CYGETDEFTHRESH, WRITE, sizeof(int));
+- _(CYGETDEFTIMEOUT, WRITE, sizeof(int));
+- _(CYGETMON, WRITE, struct_cyclades_monitor_sz);
+- _(CYGETTHRESH, WRITE, sizeof(int));
+- _(CYGETTIMEOUT, WRITE, sizeof(int));
+- _(CYSETDEFTHRESH, NONE, 0);
+- _(CYSETDEFTIMEOUT, NONE, 0);
+- _(CYSETTHRESH, NONE, 0);
+- _(CYSETTIMEOUT, NONE, 0);
+ _(EQL_EMANCIPATE, WRITE, struct_ifreq_sz);
+ _(EQL_ENSLAVE, WRITE, struct_ifreq_sz);
+ _(EQL_GETMASTRCFG, WRITE, struct_ifreq_sz);
+diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.h b/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.h
+--- a/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.h
++++ b/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.h
+@@ -983,7 +983,6 @@
+
+ #if SANITIZER_LINUX && !SANITIZER_ANDROID
+ extern unsigned struct_ax25_parms_struct_sz;
+-extern unsigned struct_cyclades_monitor_sz;
+ extern unsigned struct_input_keymap_entry_sz;
+ extern unsigned struct_ipx_config_data_sz;
+ extern unsigned struct_kbdiacrs_sz;
+@@ -1328,15 +1327,6 @@
+ #endif // SANITIZER_LINUX
+
+ #if SANITIZER_LINUX && !SANITIZER_ANDROID
+-extern unsigned IOCTL_CYGETDEFTHRESH;
+-extern unsigned IOCTL_CYGETDEFTIMEOUT;
+-extern unsigned IOCTL_CYGETMON;
+-extern unsigned IOCTL_CYGETTHRESH;
+-extern unsigned IOCTL_CYGETTIMEOUT;
+-extern unsigned IOCTL_CYSETDEFTHRESH;
+-extern unsigned IOCTL_CYSETDEFTIMEOUT;
+-extern unsigned IOCTL_CYSETTHRESH;
+-extern unsigned IOCTL_CYSETTIMEOUT;
+ extern unsigned IOCTL_EQL_EMANCIPATE;
+ extern unsigned IOCTL_EQL_ENSLAVE;
+ extern unsigned IOCTL_EQL_GETMASTRCFG;
+diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cpp b/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cpp
+--- a/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cpp
++++ b/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cpp
+@@ -143,7 +143,6 @@
+ # include <sys/procfs.h>
+ #endif
+ #include <sys/user.h>
+-#include <linux/cyclades.h>
+ #include <linux/if_eql.h>
+ #include <linux/if_plip.h>
+ #include <linux/lp.h>
+@@ -460,7 +459,6 @@
+
+ #if SANITIZER_GLIBC
+ unsigned struct_ax25_parms_struct_sz = sizeof(struct ax25_parms_struct);
+- unsigned struct_cyclades_monitor_sz = sizeof(struct cyclades_monitor);
+ #if EV_VERSION > (0x010000)
+ unsigned struct_input_keymap_entry_sz = sizeof(struct input_keymap_entry);
+ #else
+@@ -824,15 +822,6 @@
+ #endif // SANITIZER_LINUX
+
+ #if SANITIZER_LINUX && !SANITIZER_ANDROID
+- unsigned IOCTL_CYGETDEFTHRESH = CYGETDEFTHRESH;
+- unsigned IOCTL_CYGETDEFTIMEOUT = CYGETDEFTIMEOUT;
+- unsigned IOCTL_CYGETMON = CYGETMON;
+- unsigned IOCTL_CYGETTHRESH = CYGETTHRESH;
+- unsigned IOCTL_CYGETTIMEOUT = CYGETTIMEOUT;
+- unsigned IOCTL_CYSETDEFTHRESH = CYSETDEFTHRESH;
+- unsigned IOCTL_CYSETDEFTIMEOUT = CYSETDEFTIMEOUT;
+- unsigned IOCTL_CYSETTHRESH = CYSETTHRESH;
+- unsigned IOCTL_CYSETTIMEOUT = CYSETTIMEOUT;
+ unsigned IOCTL_EQL_EMANCIPATE = EQL_EMANCIPATE;
+ unsigned IOCTL_EQL_ENSLAVE = EQL_ENSLAVE;
+ unsigned IOCTL_EQL_GETMASTRCFG = EQL_GETMASTRCFG;
diff --git a/var/spack/repos/builtin/packages/llvm/no_cyclades9.patch b/var/spack/repos/builtin/packages/llvm/no_cyclades9.patch
new file mode 100644
index 0000000000..7532865e6d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/llvm/no_cyclades9.patch
@@ -0,0 +1,42 @@
+--- a/compiler-rt/lib/sanitizer_common/sanitizer_common_interceptors_ioctl.inc
++++ b/compiler-rt/lib/sanitizer_common/sanitizer_common_interceptors_ioctl.inc
+@@ -370,9 +370,0 @@
+- _(CYGETDEFTHRESH, WRITE, sizeof(int));
+- _(CYGETDEFTIMEOUT, WRITE, sizeof(int));
+- _(CYGETMON, WRITE, struct_cyclades_monitor_sz);
+- _(CYGETTHRESH, WRITE, sizeof(int));
+- _(CYGETTIMEOUT, WRITE, sizeof(int));
+- _(CYSETDEFTHRESH, NONE, 0);
+- _(CYSETDEFTIMEOUT, NONE, 0);
+- _(CYSETTHRESH, NONE, 0);
+- _(CYSETTIMEOUT, NONE, 0);
+--- a/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.h
++++ b/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.h
+@@ -986,1 +986,0 @@
+- extern unsigned struct_cyclades_monitor_sz;
+@@ -1331,9 +1327,0 @@
+- extern unsigned IOCTL_CYGETDEFTHRESH;
+- extern unsigned IOCTL_CYGETDEFTIMEOUT;
+- extern unsigned IOCTL_CYGETMON;
+- extern unsigned IOCTL_CYGETTHRESH;
+- extern unsigned IOCTL_CYGETTIMEOUT;
+- extern unsigned IOCTL_CYSETDEFTHRESH;
+- extern unsigned IOCTL_CYSETDEFTIMEOUT;
+- extern unsigned IOCTL_CYSETTHRESH;
+- extern unsigned IOCTL_CYSETTIMEOUT;
+--- a/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cc
++++ b/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cc
+@@ -143,1 +143,0 @@
+-#include <linux/cyclades.h>
+@@ -460,1 +459,0 @@
+- unsigned struct_cyclades_monitor_sz = sizeof(struct cyclades_monitor);
+@@ -824,9 +822,0 @@
+- unsigned IOCTL_CYGETDEFTHRESH = CYGETDEFTHRESH;
+- unsigned IOCTL_CYGETDEFTIMEOUT = CYGETDEFTIMEOUT;
+- unsigned IOCTL_CYGETMON = CYGETMON;
+- unsigned IOCTL_CYGETTHRESH = CYGETTHRESH;
+- unsigned IOCTL_CYGETTIMEOUT = CYGETTIMEOUT;
+- unsigned IOCTL_CYSETDEFTHRESH = CYSETDEFTHRESH;
+- unsigned IOCTL_CYSETDEFTIMEOUT = CYSETDEFTIMEOUT;
+- unsigned IOCTL_CYSETTHRESH = CYSETTHRESH;
+- unsigned IOCTL_CYSETTIMEOUT = CYSETTIMEOUT;
diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py
index c5a88cd9fb..8d353b071b 100644
--- a/var/spack/repos/builtin/packages/llvm/package.py
+++ b/var/spack/repos/builtin/packages/llvm/package.py
@@ -2,12 +2,14 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
import os.path
import re
import sys
import llnl.util.tty as tty
+import spack.build_environment
import spack.util.executable
@@ -23,10 +25,12 @@ class Llvm(CMakePackage, CudaPackage):
url = "https://github.com/llvm/llvm-project/archive/llvmorg-7.1.0.tar.gz"
list_url = "https://releases.llvm.org/download.html"
git = "https://github.com/llvm/llvm-project"
- maintainers = ['trws', 'naromero77']
+ maintainers = ['trws', 'haampie']
tags = ['e4s']
+ generator = 'Ninja'
+
family = "compiler" # Used by lmod
# fmt: off
@@ -129,6 +133,11 @@ class Llvm(CMakePackage, CudaPackage):
"components in a single shared library",
)
variant(
+ "link_llvm_dylib",
+ default=False,
+ description="Link LLVM tools against the LLVM shared library",
+ )
+ variant(
"all_targets",
default=False,
description="Build all supported targets, default targets "
@@ -154,10 +163,15 @@ class Llvm(CMakePackage, CudaPackage):
description="Enable code-signing on macOS")
variant("python", default=False, description="Install python bindings")
+ variant('version_suffix', default='none', description="Add a symbol suffix")
+ variant('z3', default=False, description='Use Z3 for the clang static analyzer')
+
extends("python", when="+python")
# Build dependency
depends_on("cmake@3.4.3:", type="build")
+ depends_on('cmake@3.13.4:', type='build', when='@12:')
+ depends_on("ninja", type="build")
depends_on("python@2.7:2.8", when="@:4 ~python", type="build")
depends_on("python", when="@5: ~python", type="build")
depends_on("pkgconfig", type="build")
@@ -165,7 +179,7 @@ class Llvm(CMakePackage, CudaPackage):
# Universal dependency
depends_on("python@2.7:2.8", when="@:4+python")
depends_on("python", when="@5:+python")
- depends_on("z3", when="@9:")
+ depends_on('z3', when='@8:+clang+z3')
# openmp dependencies
depends_on("perl-data-dumper", type=("build"))
@@ -189,6 +203,7 @@ class Llvm(CMakePackage, CudaPackage):
depends_on("isl", when="@:3.6 +polly")
conflicts("+llvm_dylib", when="+shared_libs")
+ conflicts("+link_llvm_dylib", when="~llvm_dylib")
conflicts("+lldb", when="~clang")
conflicts("+libcxx", when="~clang")
conflicts("+internal_unwind", when="~clang")
@@ -197,11 +212,16 @@ class Llvm(CMakePackage, CudaPackage):
# Introduced in version 11 as a part of LLVM and not a separate package.
conflicts("+flang", when="@:10")
+ conflicts('~mlir', when='+flang', msg='Flang requires MLIR')
+
# Older LLVM do not build with newer compilers, and vice versa
- conflicts("%gcc@11:", when="@:7")
conflicts("%gcc@8:", when="@:5")
conflicts("%gcc@:5.0", when="@8:")
- conflicts("%apple-clang@13:", when="@:9")
+ # clang/lib: a lambda parameter cannot shadow an explicitly captured entity
+ conflicts("%clang@8:", when="@:4")
+
+ # When these versions are concretized, but not explicitly with +libcxx, these
+ # conflicts will enable clingo to set ~libcxx, making the build successful:
# libc++ of LLVM13, see https://libcxx.llvm.org/#platform-and-compiler-support
# @13 does not support %gcc@:10 https://bugs.llvm.org/show_bug.cgi?id=51359#c1
@@ -210,7 +230,15 @@ class Llvm(CMakePackage, CudaPackage):
# AppleClang 12 - latest stable release per Xcode release page
conflicts("%gcc@:10", when="@13:+libcxx")
conflicts("%clang@:10", when="@13:+libcxx")
- conflicts("%apple_clang@:11", when="@13:+libcxx")
+ conflicts("%apple-clang@:11", when="@13:+libcxx")
+
+ # libcxx-4 and compiler-rt-4 fail to build with "newer" clang and gcc versions:
+ conflicts('%gcc@7:', when='@:4+libcxx')
+ conflicts('%clang@6:', when='@:4+libcxx')
+ conflicts('%apple-clang@6:', when='@:4+libcxx')
+ conflicts('%gcc@7:', when='@:4+compiler-rt')
+ conflicts('%clang@6:', when='@:4+compiler-rt')
+ conflicts('%apple-clang@6:', when='@:4+compiler-rt')
# OMP TSAN exists in > 5.x
conflicts("+omp_tsan", when="@:5")
@@ -246,10 +274,22 @@ class Llvm(CMakePackage, CudaPackage):
# Github issue #4986
patch("llvm_gcc7.patch", when="@4.0.0:4.0.1+lldb %gcc@7.0:")
- # https://github.com/spack/spack/issues/24270
- patch('https://src.fedoraproject.org/rpms/llvm10/raw/7ce7ebd066955ea95ba2b491c41fbc6e4ee0643a/f/llvm10-gcc11.patch',
- sha256='958c64838c9d469be514eef195eca0f8c3ab069bc4b64a48fad59991c626bab8',
- when='@8:11 %gcc@11:')
+ # sys/ustat.h has been removed in favour of statfs from glibc-2.28. Use fixed sizes:
+ patch('llvm5-sanitizer-ustat.patch', when="@4:6+compiler-rt")
+
+ # Fix lld templates: https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=230463
+ patch('llvm4-lld-ELF-Symbols.patch', when="@4+lld%clang@6:")
+ patch('llvm5-lld-ELF-Symbols.patch', when="@5+lld%clang@7:")
+
+    # Fix missing std::size_t in 'llvm@4:5' when built with '%clang@7:'
+ patch('xray_buffer_queue-cstddef.patch', when="@4:5+compiler-rt%clang@7:")
+
+ # https://github.com/llvm/llvm-project/commit/947f9692440836dcb8d88b74b69dd379d85974ce
+ patch('sanitizer-ipc_perm_mode.patch', when="@5:7+compiler-rt%clang@11:")
+ patch('sanitizer-ipc_perm_mode.patch', when="@5:9+compiler-rt%gcc@9:")
+
+ # github.com/spack/spack/issues/24270: MicrosoftDemangle for %gcc@10: and %clang@13:
+ patch('missing-includes.patch', when='@8:9')
# Backport from llvm master + additional fix
# see https://bugs.llvm.org/show_bug.cgi?id=39696
@@ -276,6 +316,11 @@ class Llvm(CMakePackage, CudaPackage):
# Workaround for issue https://github.com/spack/spack/issues/18197
patch('llvm7_intel.patch', when='@7 %intel@18.0.2,19.0.4')
+ # Remove cyclades support to build against newer kernel headers
+ # https://reviews.llvm.org/D102059
+ patch('no_cyclades.patch', when='@10:12.0.0')
+ patch('no_cyclades9.patch', when='@6:9')
+
# The functions and attributes below implement external package
# detection for LLVM. See:
#
@@ -439,6 +484,17 @@ class Llvm(CMakePackage, CudaPackage):
return(None, flags, None)
return(flags, None, None)
+ def setup_build_environment(self, env):
+ """When using %clang, add only its ld.lld-$ver and/or ld.lld to our PATH"""
+ if self.compiler.name in ['clang', 'apple-clang']:
+ for lld in 'ld.lld-{0}'.format(self.compiler.version.version[0]), 'ld.lld':
+ bin = os.path.join(os.path.dirname(self.compiler.cc), lld)
+ sym = os.path.join(self.stage.path, 'ld.lld')
+ if os.path.exists(bin) and not os.path.exists(sym):
+ mkdirp(self.stage.path)
+ os.symlink(bin, sym)
+ env.prepend_path('PATH', self.stage.path)
+
def setup_run_environment(self, env):
if "+clang" in self.spec:
env.set("CC", join_path(self.spec.prefix.bin, "clang"))
@@ -465,6 +521,10 @@ class Llvm(CMakePackage, CudaPackage):
define("LIBOMP_HWLOC_INSTALL_DIR", spec["hwloc"].prefix),
]
+ version_suffix = spec.variants['version_suffix'].value
+ if version_suffix != 'none':
+ cmake_args.append(define('LLVM_VERSION_SUFFIX', version_suffix))
+
if python.version >= Version("3"):
cmake_args.append(define("Python3_EXECUTABLE", python.command.path))
else:
@@ -521,6 +581,13 @@ class Llvm(CMakePackage, CudaPackage):
else:
projects.append("openmp")
+ if self.spec.satisfies("@8"):
+ cmake_args.append(define('CLANG_ANALYZER_ENABLE_Z3_SOLVER',
+ self.spec.satisfies('@8+z3')))
+ if self.spec.satisfies("@9:"):
+ cmake_args.append(define('LLVM_ENABLE_Z3_SOLVER',
+ self.spec.satisfies('@9:+z3')))
+
if "+flang" in spec:
projects.append("flang")
if "+lldb" in spec:
@@ -540,9 +607,16 @@ class Llvm(CMakePackage, CudaPackage):
projects.append("polly")
cmake_args.append(define("LINK_POLLY_INTO_TOOLS", True))
- cmake_args.append(from_variant("BUILD_SHARED_LIBS", "shared_libs"))
- cmake_args.append(from_variant("LLVM_BUILD_LLVM_DYLIB", "llvm_dylib"))
- cmake_args.append(from_variant("LLVM_USE_SPLIT_DWARF", "split_dwarf"))
+ cmake_args.extend([
+ from_variant("BUILD_SHARED_LIBS", "shared_libs"),
+ from_variant("LLVM_BUILD_LLVM_DYLIB", "llvm_dylib"),
+ from_variant("LLVM_LINK_LLVM_DYLIB", "link_llvm_dylib"),
+ from_variant("LLVM_USE_SPLIT_DWARF", "split_dwarf"),
+ # By default on Linux, libc++.so is a ldscript. CMake fails to add
+ # CMAKE_INSTALL_RPATH to it, which fails. Statically link libc++abi.a
+ # into libc++.so, linking with -lc++ or -stdlib=libc++ is enough.
+ define('LIBCXX_ENABLE_STATIC_ABI_LIBRARY', True)
+ ])
if "+all_targets" not in spec: # all is default on cmake
@@ -581,12 +655,6 @@ class Llvm(CMakePackage, CudaPackage):
break
cmake_args.append(define("GCC_INSTALL_PREFIX", gcc_prefix))
- if spec.satisfies("@4.0.0:"):
- if spec.satisfies("platform=cray") or spec.satisfies(
- "platform=linux"
- ):
- cmake_args.append(define("CMAKE_BUILD_WITH_INSTALL_RPATH", "1"))
-
if self.spec.satisfies("~code_signing platform=darwin"):
cmake_args.append(define('LLDB_USE_SYSTEM_DEBUGSERVER', True))
@@ -599,43 +667,37 @@ class Llvm(CMakePackage, CudaPackage):
return cmake_args
- @run_before("build")
- def pre_install(self):
- with working_dir(self.build_directory):
- # When building shared libraries these need to be installed first
- make("install-LLVMTableGen")
- if self.spec.version >= Version("4.0.0"):
- # LLVMDemangle target was added in 4.0.0
- make("install-LLVMDemangle")
- make("install-LLVMSupport")
-
@run_after("install")
def post_install(self):
spec = self.spec
define = CMakePackage.define
# unnecessary if we build openmp via LLVM_ENABLE_RUNTIMES
- if "+cuda" in self.spec and "+omp_as_runtime" not in self.spec:
+ if "+cuda ~omp_as_runtime" in self.spec:
ompdir = "build-bootstrapped-omp"
+ prefix_paths = spack.build_environment.get_cmake_prefix_path(self)
+ prefix_paths.append(str(spec.prefix))
# rebuild libomptarget to get bytecode runtime library files
with working_dir(ompdir, create=True):
cmake_args = [
- self.stage.source_path + "/openmp",
+ '-G', 'Ninja',
+ define('CMAKE_BUILD_TYPE', spec.variants['build_type'].value),
define("CMAKE_C_COMPILER", spec.prefix.bin + "/clang"),
define("CMAKE_CXX_COMPILER", spec.prefix.bin + "/clang++"),
define("CMAKE_INSTALL_PREFIX", spec.prefix),
+ define('CMAKE_PREFIX_PATH', prefix_paths)
]
cmake_args.extend(self.cmake_args())
- cmake_args.append(define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB",
- True))
-
- # work around bad libelf detection in libomptarget
- cmake_args.append(define("LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR",
- spec["libelf"].prefix.include))
+ cmake_args.extend([
+ define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
+ define("LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR",
+ spec["libelf"].prefix.include),
+ self.stage.source_path + "/openmp",
+ ])
cmake(*cmake_args)
- make()
- make("install")
+ ninja()
+ ninja("install")
if "+python" in self.spec:
install_tree("llvm/bindings/python", site_packages_dir)
diff --git a/var/spack/repos/builtin/packages/llvm/sanitizer-ipc_perm_mode.patch b/var/spack/repos/builtin/packages/llvm/sanitizer-ipc_perm_mode.patch
new file mode 100644
index 0000000000..8f91703a32
--- /dev/null
+++ b/var/spack/repos/builtin/packages/llvm/sanitizer-ipc_perm_mode.patch
@@ -0,0 +1,9 @@
+# ipc_perm.mode is not used and has changed from short to int over architectures
+# and versions. The last change was in glibc-2.31.
+# LLVM upstream decided not to check ipc_perm.mode below glibc-2.31,
+# because it is not actually used in the sanitizer:
+# github.com/llvm/llvm-project/commit/947f9692440836dcb8d88b74b69dd379d85974ce
+--- a/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cc
++++ b/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cc
+@@ -1143,1 +1143,0 @@
+-CHECK_SIZE_AND_OFFSET(ipc_perm, mode);
diff --git a/var/spack/repos/builtin/packages/llvm/xray_buffer_queue-cstddef.patch b/var/spack/repos/builtin/packages/llvm/xray_buffer_queue-cstddef.patch
new file mode 100644
index 0000000000..b5ca841e3e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/llvm/xray_buffer_queue-cstddef.patch
@@ -0,0 +1,5 @@
+# Fix missing std::size_t in 'llvm@4:5' when built with '%clang@7:'
+--- a/compiler-rt/lib/xray/xray_buffer_queue.h
++++ b/compiler-rt/lib/xray/xray_buffer_queue.h
+@@ -18,0 +18,1 @@
++#include <cstddef>
diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py
index 7b5ebb7468..21544be611 100644
--- a/var/spack/repos/builtin/packages/lmod/package.py
+++ b/var/spack/repos/builtin/packages/lmod/package.py
@@ -19,6 +19,7 @@ class Lmod(AutotoolsPackage):
homepage = 'https://www.tacc.utexas.edu/research-development/tacc-projects/lmod'
url = "https://github.com/TACC/Lmod/archive/8.5.6.tar.gz"
+ version('8.5.27', sha256='bec911ff6b20de7d38587d1f9c351f58ed7bdf10cb3938089c82944b5ee0ab0d')
version('8.5.6', sha256='1d1058ffa33a661994c1b2af4bfee4aa1539720cd5c13d61e18adbfb231bbe88')
version('8.3', sha256='c2c2e9e6b387b011ee617cb009a2199caac8bf200330cb8a065ceedee09e664a')
version('8.2.10', sha256='15676d82235faf5c755a747f0e318badb1a5c3ff1552fa8022c67ff083ee9e2f')
diff --git a/var/spack/repos/builtin/packages/lua/package.py b/var/spack/repos/builtin/packages/lua/package.py
index c3b021d96f..4263fdc5da 100644
--- a/var/spack/repos/builtin/packages/lua/package.py
+++ b/var/spack/repos/builtin/packages/lua/package.py
@@ -63,10 +63,10 @@ class Lua(Package):
else:
target = 'linux'
make('INSTALL_TOP=%s' % prefix,
- 'MYLDFLAGS=-L%s -L%s' % (
- spec['readline'].prefix.lib,
- spec['ncurses'].prefix.lib),
- 'MYLIBS=-lncursesw -ltinfow',
+ 'MYLDFLAGS=' + ' '.join((
+ spec['readline'].libs.search_flags,
+ spec['ncurses'].libs.search_flags)),
+ 'MYLIBS=%s' % spec['ncurses'].libs.link_flags,
'CC=%s -std=gnu99 %s' % (spack_cc,
self.compiler.cc_pic_flag),
target)
diff --git a/var/spack/repos/builtin/packages/lvm2/package.py b/var/spack/repos/builtin/packages/lvm2/package.py
index 64d1f43f4b..c07de19114 100644
--- a/var/spack/repos/builtin/packages/lvm2/package.py
+++ b/var/spack/repos/builtin/packages/lvm2/package.py
@@ -18,8 +18,9 @@ class Lvm2(AutotoolsPackage, SourcewarePackage):
"""
homepage = "https://www.sourceware.org/lvm2"
- sourceware_mirror_path = "lvm2/LVM2.2.03.05.tgz"
+ sourceware_mirror_path = 'lvm2/LVM2.2.03.14.tgz'
+ version('2.03.14', sha256='4a63bc8a084a8ae3c7bc5e6530cac264139d218575c64416c8b99e3fe039a05c')
version('2.03.05', sha256='ca52815c999b20c6d25e3192f142f081b93d01f07b9d787e99664b169dba2700')
version('2.03.04', sha256='f151f36fc0039997d2d9369b607b9262568b1a268afe19fd1535807355402142')
version('2.03.03', sha256='cedefa63ec5ae1b62fedbfddfc30706c095be0fc7c6aaed6fd1c50bc8c840dde')
diff --git a/var/spack/repos/builtin/packages/madgraph5amc/package.py b/var/spack/repos/builtin/packages/madgraph5amc/package.py
index fcfcbc1f8e..8d5ef29444 100644
--- a/var/spack/repos/builtin/packages/madgraph5amc/package.py
+++ b/var/spack/repos/builtin/packages/madgraph5amc/package.py
@@ -90,6 +90,9 @@ class Madgraph5amc(Package):
make(parallel=False)
with working_dir(join_path('vendor', 'StdHEP')):
+ for m in ['mcfio/arch_mcfio', 'src/stdhep_arch']:
+ arch = FileFilter(m)
+ arch.filter('CC.*=.*', 'CC = {0}'.format(spack_cc))
make(parallel=False)
if '+atlas' in spec:
diff --git a/var/spack/repos/builtin/packages/maker/package.py b/var/spack/repos/builtin/packages/maker/package.py
index 1416e21916..eb5ba97412 100644
--- a/var/spack/repos/builtin/packages/maker/package.py
+++ b/var/spack/repos/builtin/packages/maker/package.py
@@ -49,6 +49,7 @@ class Maker(Package):
depends_on('perl', type=('build', 'run'))
depends_on('perl-module-build', type='build')
depends_on('perl-dbi', type=('build', 'run'))
+ depends_on('perl-dbd-mysql', type=('build', 'run'))
depends_on('perl-dbd-pg', type=('build', 'run'))
depends_on('perl-dbd-sqlite', type=('build', 'run'))
depends_on('perl-forks', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/mariadb-c-client/package.py b/var/spack/repos/builtin/packages/mariadb-c-client/package.py
index e6f2e0904b..0cc9690e86 100644
--- a/var/spack/repos/builtin/packages/mariadb-c-client/package.py
+++ b/var/spack/repos/builtin/packages/mariadb-c-client/package.py
@@ -57,6 +57,7 @@ class MariadbCClient(CMakePackage):
depends_on('pcre')
depends_on('openssl')
depends_on('zlib')
+ depends_on('krb5')
# patch needed for cmake-3.20
patch('https://github.com/mariadb-corporation/mariadb-connector-c/commit/242cab8c.patch',
diff --git a/var/spack/repos/builtin/packages/mbedtls/fix-dt-needed-shared-libs.patch b/var/spack/repos/builtin/packages/mbedtls/fix-dt-needed-shared-libs.patch
new file mode 100644
index 0000000000..f57d437825
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mbedtls/fix-dt-needed-shared-libs.patch
@@ -0,0 +1,75 @@
+From ae52fe2bd1016db816ec9a531fa1103f717c7441 Mon Sep 17 00:00:00 2001
+From: Harmen Stoppels <harmenstoppels@gmail.com>
+Date: Mon, 8 Nov 2021 15:41:49 +0100
+Subject: [PATCH] Fix DT_NEEDED
+
+---
+ library/Makefile | 14 +++++++-------
+ 1 file changed, 7 insertions(+), 7 deletions(-)
+
+diff --git a/library/Makefile b/library/Makefile
+index 0ee6e4f36..b6604e69b 100644
+--- a/library/Makefile
++++ b/library/Makefile
+@@ -196,7 +196,7 @@ endif
+
+ libmbedtls.$(SOEXT_TLS): $(OBJS_TLS) libmbedx509.so
+ echo " LD $@"
+- $(CC) -shared -Wl,-soname,$@ -L. -lmbedcrypto -lmbedx509 $(LOCAL_LDFLAGS) $(LDFLAGS) -o $@ $(OBJS_TLS)
++ $(CC) -shared -Wl,-soname,$@ -o $@ $(OBJS_TLS) -L. -lmbedx509 -lmbedcrypto $(LOCAL_LDFLAGS) $(LDFLAGS)
+
+ libmbedtls.so: libmbedtls.$(SOEXT_TLS)
+ echo " LN $@ -> $<"
+@@ -204,11 +204,11 @@ libmbedtls.so: libmbedtls.$(SOEXT_TLS)
+
+ libmbedtls.dylib: $(OBJS_TLS) libmbedx509.dylib
+ echo " LD $@"
+- $(CC) -dynamiclib -L. -lmbedcrypto -lmbedx509 $(LOCAL_LDFLAGS) $(LDFLAGS) -o $@ $(OBJS_TLS)
++ $(CC) -dynamiclib -o $@ $(OBJS_TLS) -L. -lmbedx509 -lmbedcrypto $(LOCAL_LDFLAGS) $(LDFLAGS)
+
+ libmbedtls.dll: $(OBJS_TLS) libmbedx509.dll
+ echo " LD $@"
+- $(CC) -shared -Wl,-soname,$@ -Wl,--out-implib,$@.a -o $@ $(OBJS_TLS) -lws2_32 -lwinmm -lgdi32 -L. -lmbedcrypto -lmbedx509 -static-libgcc $(LOCAL_LDFLAGS) $(LDFLAGS)
++ $(CC) -shared -Wl,-soname,$@ -Wl,--out-implib,$@.a -o $@ $(OBJS_TLS) -lws2_32 -lwinmm -lgdi32 -L. -lmbedx509 -lmbedcrypto -static-libgcc $(LOCAL_LDFLAGS) $(LDFLAGS)
+
+ # x509
+ libmbedx509.a: $(OBJS_X509)
+@@ -223,7 +223,7 @@ endif
+
+ libmbedx509.$(SOEXT_X509): $(OBJS_X509) libmbedcrypto.so
+ echo " LD $@"
+- $(CC) -shared -Wl,-soname,$@ -L. -lmbedcrypto $(LOCAL_LDFLAGS) $(LDFLAGS) -o $@ $(OBJS_X509)
++ $(CC) -shared -Wl,-soname,$@ -o $@ $(OBJS_X509) -L. -lmbedcrypto $(LOCAL_LDFLAGS) $(LDFLAGS)
+
+ libmbedx509.so: libmbedx509.$(SOEXT_X509)
+ echo " LN $@ -> $<"
+@@ -231,7 +231,7 @@ libmbedx509.so: libmbedx509.$(SOEXT_X509)
+
+ libmbedx509.dylib: $(OBJS_X509) libmbedcrypto.dylib
+ echo " LD $@"
+- $(CC) -dynamiclib -L. -lmbedcrypto $(LOCAL_LDFLAGS) $(LDFLAGS) -o $@ $(OBJS_X509)
++ $(CC) -dynamiclib -o $@ $(OBJS_X509) -L. -lmbedcrypto $(LOCAL_LDFLAGS) $(LDFLAGS)
+
+ libmbedx509.dll: $(OBJS_X509) libmbedcrypto.dll
+ echo " LD $@"
+@@ -250,7 +250,7 @@ endif
+
+ libmbedcrypto.$(SOEXT_CRYPTO): $(OBJS_CRYPTO)
+ echo " LD $@"
+- $(CC) -shared -Wl,-soname,$@ $(LOCAL_LDFLAGS) $(LDFLAGS) -o $@ $(OBJS_CRYPTO)
++ $(CC) -shared -Wl,-soname,$@ -o $@ $(OBJS_CRYPTO) $(LOCAL_LDFLAGS) $(LDFLAGS)
+
+ libmbedcrypto.so: libmbedcrypto.$(SOEXT_CRYPTO)
+ echo " LN $@ -> $<"
+@@ -258,7 +258,7 @@ libmbedcrypto.so: libmbedcrypto.$(SOEXT_CRYPTO)
+
+ libmbedcrypto.dylib: $(OBJS_CRYPTO)
+ echo " LD $@"
+- $(CC) -dynamiclib $(LOCAL_LDFLAGS) $(LDFLAGS) -o $@ $(OBJS_CRYPTO)
++ $(CC) -dynamiclib -o $@ $(OBJS_CRYPTO) $(LOCAL_LDFLAGS) $(LDFLAGS)
+
+ libmbedcrypto.dll: $(OBJS_CRYPTO)
+ echo " LD $@"
+--
+2.25.1
+
diff --git a/var/spack/repos/builtin/packages/mbedtls/package.py b/var/spack/repos/builtin/packages/mbedtls/package.py
index d09ec5b43f..e45ecb5460 100644
--- a/var/spack/repos/builtin/packages/mbedtls/package.py
+++ b/var/spack/repos/builtin/packages/mbedtls/package.py
@@ -15,10 +15,11 @@ class Mbedtls(MakefilePackage):
homepage = "https://tls.mbed.org"
url = "https://github.com/ARMmbed/mbedtls/archive/mbedtls-2.2.1.tar.gz"
- maintainers = ['mwkrentel']
+ maintainers = ['mwkrentel', 'haampie']
version('3.0.0', sha256='377d376919be19f07c7e7adeeded088a525be40353f6d938a78e4f986bce2ae0')
version('2.27.0', sha256='4f6a43f06ded62aa20ef582436a39b65902e1126cbbe2fb17f394e9e9a552767')
+ version('2.24.0', sha256='b5a779b5f36d5fc4cba55faa410685f89128702423ad07b36c5665441a06a5f3')
version('2.16.11', sha256='51bb9685c4f4ff9255da5659ff346b89dcaf129e3ba0f3b2b0c48a1a7495e701')
version('2.16.9', sha256='b7ca99ee10551b5b13242b7effebefd2a5cc38c287e5f5be1267d51ee45effe3', deprecated=True)
version('2.16.7', sha256='4786b7d1676f5e4d248f3a7f2d28446876d64962634f060ff21b92c690cfbe86', deprecated=True)
@@ -42,6 +43,10 @@ class Mbedtls(MakefilePackage):
depends_on('python@3:', type='test', when='@3:')
depends_on('python@:2', type='test', when='@:2')
+ # See https://github.com/ARMmbed/mbedtls/pull/5126
+ # and the 2.x backport: https://github.com/ARMmbed/mbedtls/pull/5133
+ patch('fix-dt-needed-shared-libs.patch', when='@2.7:3.0.0')
+
build_type_to_flags = {
'Debug': '-O0 -g',
'Release': '-O3',
diff --git a/var/spack/repos/builtin/packages/meep/package.py b/var/spack/repos/builtin/packages/meep/package.py
index d7bfb0df83..9710da837d 100644
--- a/var/spack/repos/builtin/packages/meep/package.py
+++ b/var/spack/repos/builtin/packages/meep/package.py
@@ -11,31 +11,50 @@ class Meep(AutotoolsPackage):
software package developed at MIT to model electromagnetic systems."""
homepage = "http://ab-initio.mit.edu/wiki/index.php/Meep"
- url = "http://ab-initio.mit.edu/meep/meep-1.3.tar.gz"
- list_url = "http://ab-initio.mit.edu/meep/old"
-
- version('1.3', sha256='564c1ff1b413a3487cf81048a45deabfdac4243a1a37ce743f4fcf0c055fd438')
- version('1.2.1', sha256='f1f0683e5688d231f7dd1863939677148fc27a6744c03510e030c85d6c518ea5')
- version('1.1.1', sha256='7a97b5555da1f9ea2ec6eed5c45bd97bcd6ddbd54bdfc181f46c696dffc169f2')
-
- variant('blas', default=True, description='Enable BLAS support')
- variant('lapack', default=True, description='Enable LAPACK support')
- variant('harminv', default=True, description='Enable Harminv support')
- variant('guile', default=True, description='Enable Guilde support')
- variant('libctl', default=True, description='Enable libctl support')
- variant('mpi', default=True, description='Enable MPI support')
- variant('hdf5', default=True, description='Enable HDF5 support')
- variant('gsl', default=True, description='Enable GSL support')
+ git = "https://github.com/NanoComp/meep.git"
+ url = "https://github.com/NanoComp/meep/archive/refs/tags/v1.21.0.tar.gz"
+
+ version('master', branch='master')
+
+ version('1.21.0', sha256='71911cd2f38b15bdafe9a27ad111f706f24717894d5f9b6f9f19c6c10a0d5896')
+ version('1.3', sha256='564c1ff1b413a3487cf81048a45deabfdac4243a1a37ce743f4fcf0c055fd438',
+ url='http://ab-initio.mit.edu/meep/meep-1.3.tar.gz')
+ version('1.2.1', sha256='f1f0683e5688d231f7dd1863939677148fc27a6744c03510e030c85d6c518ea5',
+ url='http://ab-initio.mit.edu/meep/meep-1.2.1.tar.gz')
+ version('1.1.1', sha256='7a97b5555da1f9ea2ec6eed5c45bd97bcd6ddbd54bdfc181f46c696dffc169f2',
+ url='http://ab-initio.mit.edu/meep/old/meep-1.1.1.tar.gz')
+
+ variant('blas', default=True, description='Enable BLAS support')
+ variant('lapack', default=True, description='Enable LAPACK support')
+ variant('harminv', default=True, description='Enable Harminv support')
+ variant('guile', default=True, description='Enable Guilde support')
+ variant('libctl', default=True, description='Enable libctl support')
+ variant('mpi', default=True, description='Enable MPI support')
+ variant('hdf5', default=True, description='Enable HDF5 support')
+ variant('gsl', default=True, description='Enable GSL support')
+ variant('python', default=True, description='Enable Python support')
+ variant('single', default=False, description='Enable Single Precision')
+
+ depends_on('autoconf', type='build', when='@1.21.0')
+ depends_on('automake', type='build', when='@1.21.0')
+ depends_on('libtool', type='build', when='@1.21.0')
depends_on('blas', when='+blas')
depends_on('lapack', when='+lapack')
depends_on('harminv', when='+harminv')
- depends_on('guile', when='+guile')
- depends_on('libctl@3.2:', when='+libctl')
+ depends_on('guile@:2', when='@:1.4+guile')
+ depends_on('guile@2:', when='@1.4:+guile')
+ depends_on('libctl@3.2', when='@:1.3+libctl')
+ depends_on('libctl@4:', when='+libctl')
depends_on('mpi', when='+mpi')
depends_on('hdf5~mpi', when='+hdf5~mpi')
depends_on('hdf5+mpi', when='+hdf5+mpi')
depends_on('gsl', when='+gsl')
+ with when('+python'):
+ depends_on('python')
+ depends_on('py-numpy')
+ depends_on('swig')
+ depends_on('py-mpi4py', when='+mpi')
def configure_args(self):
spec = self.spec
@@ -72,6 +91,18 @@ class Meep(AutotoolsPackage):
else:
config_args.append('--without-hdf5')
+ if '+python' in spec:
+ config_args.append('--with-python')
+ else:
+ config_args.append('--without-python')
+ config_args.append('--without-scheme')
+
+ if '+single' in spec:
+ config_args.append('--enable-single')
+
+ if spec.satisfies('@1.21.0:'):
+ config_args.append('--enable-maintainer-mode')
+
return config_args
def check(self):
diff --git a/var/spack/repos/builtin/packages/megahit/amd.patch b/var/spack/repos/builtin/packages/megahit/amd.patch
index 6f99f616b2..6f99f616b2 100755..100644
--- a/var/spack/repos/builtin/packages/megahit/amd.patch
+++ b/var/spack/repos/builtin/packages/megahit/amd.patch
diff --git a/var/spack/repos/builtin/packages/mercury/package.py b/var/spack/repos/builtin/packages/mercury/package.py
index e3a76d0ecf..efdc99a486 100644
--- a/var/spack/repos/builtin/packages/mercury/package.py
+++ b/var/spack/repos/builtin/packages/mercury/package.py
@@ -15,6 +15,7 @@ class Mercury(CMakePackage):
maintainers = ['soumagne']
tags = ['e4s']
version('master', branch='master', submodules=True)
+ version('2.1.0', sha256='9a58437161e9273b1b1c484d2f1a477a89eea9afe84575415025d47656f3761b')
version('2.0.1', sha256='335946d9620ac669643ffd9861a5fb3ee486834bab674b7779eaac9d6662e3fa')
version('2.0.0', sha256='9e80923712e25df56014309df70660e828dbeabbe5fcc82ee024bcc86e7eb6b7')
version('1.0.1', sha256='02febd56c401ef7afa250caf28d012b37dee842bfde7ee16fcd2f741b9cf25b3')
@@ -22,11 +23,11 @@ class Mercury(CMakePackage):
version('0.9.0', sha256='40868e141cac035213fe79400f8926823fb1f5a0651fd7027cbe162b063843ef')
variant('bmi', default=False, description='Use BMI plugin')
- variant('cci', default=False, description='Use CCI plugin')
variant('mpi', default=False, description='Use MPI plugin')
- variant('ofi', default=True, description='Use OFI libfabric plugin')
+ variant('ofi', default=True, when='@1.0.0:', description='Use OFI libfabric plugin')
# NOTE: the sm plugin does not require any package dependency.
variant('sm', default=True, description='Use shared-memory plugin')
+ variant('ucx', default=False, when='@2.1.0:', description='Use UCX plugin')
# NOTE: if boostsys is False, mercury will install its own copy
# of the preprocessor headers.
variant('boostsys', default=True,
@@ -36,7 +37,7 @@ class Mercury(CMakePackage):
# NOTE: the 'udreg' variant requires that the MPICH_GNI_NDREG_ENTRIES=1024
# environment variable be set at run time to avoid conflicts with
# Cray-MPICH if libfabric and MPI are used at the same time
- variant('udreg', default=False,
+ variant('udreg', default=False, when='@1.0.0:+ofi',
description='Enable udreg on supported Cray platforms')
variant('debug', default=False,
description='Enable Mercury to print debug output')
@@ -44,16 +45,16 @@ class Mercury(CMakePackage):
description='Checksum verify all request/response messages')
depends_on('cmake@2.8.12.2:', type='build')
- # depends_on('cci', when='+cci') # TODO: add CCI package
depends_on('bmi', when='+bmi')
depends_on('mpi', when='+mpi')
- depends_on('libfabric@1.5:', when='+ofi')
- depends_on('openpa@1.0.3:', when='%gcc@:4.8')
+ with when('+ofi'):
+ depends_on('libfabric@1.5:', when='@:2.0.1')
+ depends_on('libfabric@1.7:', when='@2.1.0:')
+ # openpa dependency is removed in 2.1.0
+ depends_on('openpa@1.0.3:', when='@:2.0.1%gcc@:4.8')
depends_on('boost@1.48:', when='+boostsys')
depends_on('boost', when='@:0.9') # internal boost headers were added in 1.0.0
-
- conflicts('+ofi', when='@:0.9') # libfabric support was added in 1.0.0
- conflicts('~ofi', when='+udreg') # udreg option is specific to OFI
+ depends_on('ucx+thread_multiple', when='+ucx')
# Fix CMake check_symbol_exists
# See https://github.com/mercury-hpc/mercury/issues/299
@@ -68,52 +69,63 @@ class Mercury(CMakePackage):
def cmake_args(self):
"""Populate cmake arguments for Mercury."""
spec = self.spec
- variant_bool = lambda feature: str(feature in spec)
+ define = self.define
+ define_from_variant = self.define_from_variant
parallel_tests = '+mpi' in spec and self.run_tests
cmake_args = [
- '-DBUILD_SHARED_LIBS:BOOL=%s' % variant_bool('+shared'),
- '-DBUILD_TESTING:BOOL=%s' % str(self.run_tests),
- '-DMERCURY_ENABLE_PARALLEL_TESTING:BOOL=%s' % str(parallel_tests),
- '-DMERCURY_USE_BOOST_PP:BOOL=ON',
- '-DMERCURY_USE_CHECKSUMS:BOOL=%s' % variant_bool('+checksum'),
- '-DMERCURY_USE_SYSTEM_MCHECKSUM:BOOL=OFF',
- '-DMERCURY_USE_XDR:BOOL=OFF',
- '-DNA_USE_BMI:BOOL=%s' % variant_bool('+bmi'),
- '-DNA_USE_CCI:BOOL=%s' % variant_bool('+cci'),
- '-DNA_USE_MPI:BOOL=%s' % variant_bool('+mpi'),
- '-DNA_USE_SM:BOOL=%s' % variant_bool('+sm'),
+ define_from_variant('BUILD_SHARED_LIBS', 'shared'),
+ define('BUILD_TESTING', self.run_tests),
+ define('MERCURY_USE_BOOST_PP', True),
+ define_from_variant('MERCURY_USE_CHECKSUMS', 'checksum'),
+ define('MERCURY_USE_SYSTEM_MCHECKSUM', False),
+ define('MERCURY_USE_XDR', False),
+ define_from_variant('NA_USE_BMI', 'bmi'),
+ define_from_variant('NA_USE_MPI', 'mpi'),
+ define_from_variant('NA_USE_SM', 'sm'),
]
+ if '@2.1.0:' in spec:
+ cmake_args.append(
+ define_from_variant('NA_USE_UCX', 'ucx')
+ )
+
if '@2.0.0:' in spec:
cmake_args.extend([
- '-DMERCURY_ENABLE_DEBUG:BOOL=%s' % variant_bool('+debug'),
+ define_from_variant('MERCURY_ENABLE_DEBUG', 'debug'),
+ define('MERCURY_TESTING_ENABLE_PARALLEL', parallel_tests),
])
# Previous versions of mercury had more extensive CMake options
if '@:1.0.1' in spec:
cmake_args.extend([
- '-DMERCURY_ENABLE_POST_LIMIT:BOOL=OFF',
- '-DMERCURY_ENABLE_VERBOSE_ERROR=%s' % variant_bool('+debug'),
- '-DMERCURY_USE_EAGER_BULK:BOOL=ON',
- '-DMERCURY_USE_SELF_FORWARD:BOOL=ON',
+ define('MERCURY_ENABLE_PARALLEL_TESTING', parallel_tests),
+ define('MERCURY_ENABLE_POST_LIMIT', False),
+ define_from_variant('MERCURY_ENABLE_VERBOSE_ERROR', 'debug'),
+ define('MERCURY_USE_EAGER_BULK', True),
+ define('MERCURY_USE_SELF_FORWARD', True),
])
if '@1.0.0:' in spec:
cmake_args.extend([
- '-DMERCURY_USE_SYSTEM_BOOST:BOOL=%s'
- % variant_bool('+boostsys'),
- '-DNA_USE_OFI:BOOL=%s' % variant_bool('+ofi'),
+ define_from_variant('MERCURY_USE_SYSTEM_BOOST', 'boostsys'),
+ define_from_variant('NA_USE_OFI', 'ofi'),
])
if '+ofi' in spec:
- cmake_args.append(
- '-DNA_OFI_GNI_USE_UDREG:BOOL=%s' % variant_bool('+udreg')
- )
+ ofi_fabrics = spec['libfabric'].variants['fabrics'].value
+ if 'gni' in ofi_fabrics:
+ cmake_args.append(
+ define_from_variant('NA_OFI_GNI_USE_UDREG', 'udreg')
+ )
if self.run_tests:
+ supported = ['sockets', 'tcp', 'verbs', 'psm2', 'gni']
+ ofi_test_fabrics = list(
+ filter(lambda x: x in supported, ofi_fabrics)
+ )
cmake_args.append(
- '-DNA_OFI_TESTING_PROTOCOL:STRING={0}'.format(
- ';'.join(spec['libfabric'].variants['fabrics'].value)
+ define('NA_OFI_TESTING_PROTOCOL', format(
+ ';'.join(ofi_test_fabrics))
)
)
diff --git a/var/spack/repos/builtin/packages/mesa/package.py b/var/spack/repos/builtin/packages/mesa/package.py
index cd1dc46b36..7460269d40 100644
--- a/var/spack/repos/builtin/packages/mesa/package.py
+++ b/var/spack/repos/builtin/packages/mesa/package.py
@@ -19,6 +19,10 @@ class Mesa(MesonPackage):
url = "https://archive.mesa3d.org/mesa-20.2.1.tar.xz"
version('master', tag='master')
+ version('21.3.1', sha256='2b0dc2540cb192525741d00f706dbc4586349185dafc65729c7fda0800cc474d')
+ version('21.2.6', sha256='1e7e22d93c6e8859fa044b1121119d26b2e67e4184b92ebb81c66497dc80c954')
+ version('21.2.5', sha256='8e49585fb760d973723dab6435d0c86f7849b8305b1e6d99f475138d896bacbb')
+ version('21.2.4', sha256='fe6ede82d1ac02339da3c2ec1820a379641902fd351a52cc01153f76eff85b44')
version('21.2.3', sha256='7245284a159d2484770e1835a673e79e4322a9ddf43b17859668244946db7174')
version('21.2.1', sha256='2c65e6710b419b67456a48beefd0be827b32db416772e0e363d5f7d54dc01787')
version('21.0.3', sha256='565c6f4bd2d5747b919454fc1d439963024fc78ca56fd05158c3b2cde2f6912b')
@@ -35,6 +39,7 @@ class Mesa(MesonPackage):
depends_on('gettext', type='build')
depends_on('python@3:', type='build')
depends_on('py-mako@0.8.0:', type='build')
+ depends_on('unwind')
depends_on('expat')
depends_on('zlib@1.2.3:')
@@ -169,9 +174,19 @@ class Mesa(MesonPackage):
args.append(opt_enable(num_frontends > 1, 'shared-glapi'))
if '+llvm' in spec:
+ # Fix builds on hosts where /usr/bin/llvm-config-* is found and provides an
+ # incompatible version. Ensure that the llvm-config of spec['llvm'] is used.
+ args.append('--native-file')
+ args.append('meson-native-config.ini')
+ mkdirp(self.build_directory)
+ with working_dir(self.build_directory):
+ with open('meson-native-config.ini', 'w') as native_config:
+ llvm_config = spec['llvm'].prefix.bin + '/llvm-config'
+ native_config.write('[binaries]\n')
+ native_config.write("llvm-config = '{0}'\n".format(llvm_config))
args.append('-Dllvm=enabled')
args.append(opt_enable(
- '+link_dylib' in spec['llvm'], 'shared-llvm'))
+ '+llvm_dylib' in spec['llvm'], 'shared-llvm'))
else:
args.append('-Dllvm=disabled')
diff --git a/var/spack/repos/builtin/packages/mesa18/package.py b/var/spack/repos/builtin/packages/mesa18/package.py
index b75ef64751..dbafa810d0 100644
--- a/var/spack/repos/builtin/packages/mesa18/package.py
+++ b/var/spack/repos/builtin/packages/mesa18/package.py
@@ -136,7 +136,7 @@ class Mesa18(AutotoolsPackage):
if '+llvm' in spec:
args.append('--enable-llvm')
args.append('--with-llvm-prefix=%s' % spec['llvm'].prefix)
- if '+link_dylib' in spec['llvm']:
+ if '+llvm_dylib' in spec['llvm']:
args.append('--enable-llvm-shared-libs')
else:
args.append('--disable-llvm-shared-libs')
diff --git a/var/spack/repos/builtin/packages/meshtool/package.py b/var/spack/repos/builtin/packages/meshtool/package.py
index 8e4bdc2ac8..6c6661bcaf 100644
--- a/var/spack/repos/builtin/packages/meshtool/package.py
+++ b/var/spack/repos/builtin/packages/meshtool/package.py
@@ -16,6 +16,7 @@ class Meshtool(MakefilePackage):
version('master', branch='master')
# Version to use with openCARP releases
+ version('oc8.2', commit='6c5cfbd067120901f15a04bf63beec409bda6dc9')
version('oc8.1', commit="6c5cfbd067120901f15a04bf63beec409bda6dc9")
version('oc7.0', commit="6c5cfbd067120901f15a04bf63beec409bda6dc9")
diff --git a/var/spack/repos/builtin/packages/mfem/mfem-4.3-cusparse-11.4.patch b/var/spack/repos/builtin/packages/mfem/mfem-4.3-cusparse-11.4.patch
new file mode 100644
index 0000000000..faec3c1523
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mfem/mfem-4.3-cusparse-11.4.patch
@@ -0,0 +1,80 @@
+diff --git a/linalg/sparsemat.cpp b/linalg/sparsemat.cpp
+index 12136e035..0be73cf7b 100644
+--- a/linalg/sparsemat.cpp
++++ b/linalg/sparsemat.cpp
+@@ -33,7 +33,12 @@ int SparseMatrix::SparseMatrixCount = 0;
+ cusparseHandle_t SparseMatrix::handle = nullptr;
+ size_t SparseMatrix::bufferSize = 0;
+ void * SparseMatrix::dBuffer = nullptr;
+-#endif
++# if CUSPARSE_VERSION >= 11400
++# define MFEM_CUSPARSE_ALG CUSPARSE_SPMV_CSR_ALG1
++# else
++# define MFEM_CUSPARSE_ALG CUSPARSE_CSRMV_ALG1
++# endif // CUSPARSE_VERSION >= 11400
++#endif // MFEM_USE_CUDA
+
+ void SparseMatrix::InitCuSparse()
+ {
+@@ -679,25 +684,16 @@ void SparseMatrix::AddMult(const Vector &x, Vector &y, const double a) const
+ cusparseCreateMatDescr(&matA_descr);
+ cusparseSetMatIndexBase(matA_descr, CUSPARSE_INDEX_BASE_ZERO);
+ cusparseSetMatType(matA_descr, CUSPARSE_MATRIX_TYPE_GENERAL);
+-
+ #endif
+-
+ initBuffers = true;
+ }
+ // Allocate kernel space. Buffer is shared between different sparsemats
+ size_t newBufferSize = 0;
+
+-#if CUDA_VERSION >= 11020
+- cusparseSpMV_bufferSize(handle, CUSPARSE_OPERATION_NON_TRANSPOSE, &alpha,
+- matA_descr,
+- vecX_descr, &beta, vecY_descr, CUDA_R_64F,
+- CUSPARSE_SPMV_CSR_ALG1, &newBufferSize);
+-#elif CUDA_VERSION >= 10010
+ cusparseSpMV_bufferSize(handle, CUSPARSE_OPERATION_NON_TRANSPOSE, &alpha,
+ matA_descr,
+ vecX_descr, &beta, vecY_descr, CUDA_R_64F,
+- CUSPARSE_CSRMV_ALG1, &newBufferSize);
+-#endif
++ MFEM_CUSPARSE_ALG, &newBufferSize);
+
+ // Check if we need to resize
+ if (newBufferSize > bufferSize)
+@@ -707,30 +703,22 @@ void SparseMatrix::AddMult(const Vector &x, Vector &y, const double a) const
+ CuMemAlloc(&dBuffer, bufferSize);
+ }
+
+-#if CUDA_VERSION >= 11020
+- // Update input/output vectors
+- cusparseDnVecSetValues(vecX_descr, const_cast<double *>(d_x));
+- cusparseDnVecSetValues(vecY_descr, d_y);
+-
+- // Y = alpha A * X + beta * Y
+- cusparseSpMV(handle, CUSPARSE_OPERATION_NON_TRANSPOSE, &alpha, matA_descr,
+- vecX_descr, &beta, vecY_descr, CUDA_R_64F, CUSPARSE_SPMV_CSR_ALG1, dBuffer);
+-#elif CUDA_VERSION >= 10010
++#if CUDA_VERSION >= 10010
+ // Update input/output vectors
+ cusparseDnVecSetValues(vecX_descr, const_cast<double *>(d_x));
+ cusparseDnVecSetValues(vecY_descr, d_y);
+
+ // Y = alpha A * X + beta * Y
+ cusparseSpMV(handle, CUSPARSE_OPERATION_NON_TRANSPOSE, &alpha, matA_descr,
+- vecX_descr, &beta, vecY_descr, CUDA_R_64F, CUSPARSE_CSRMV_ALG1, dBuffer);
++ vecX_descr, &beta, vecY_descr, CUDA_R_64F, MFEM_CUSPARSE_ALG, dBuffer);
+ #else
+ cusparseDcsrmv(handle, CUSPARSE_OPERATION_NON_TRANSPOSE,
+ Height(), Width(), J.Capacity(),
+ &alpha, matA_descr,
+ const_cast<double *>(d_A), const_cast<int *>(d_I), const_cast<int *>(d_J),
+ const_cast<double *>(d_x), &beta, d_y);
+-#endif
+-#endif
++#endif // CUDA_VERSION >= 10010
++#endif // MFEM_USE_CUDA
+ }
+ else
+ {
diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py
index 616db65488..29e08d86a8 100644
--- a/var/spack/repos/builtin/packages/mfem/package.py
+++ b/var/spack/repos/builtin/packages/mfem/package.py
@@ -189,6 +189,7 @@ class Mfem(Package, CudaPackage, ROCmPackage):
conflicts('+umpire', when='mfem@:4.0')
conflicts('+amgx', when='mfem@:4.1')
conflicts('+amgx', when='~cuda')
+ conflicts('+mpi~cuda ^hypre+cuda')
conflicts('+superlu-dist', when='~mpi')
conflicts('+strumpack', when='~mpi')
@@ -301,6 +302,7 @@ class Mfem(Package, CudaPackage, ROCmPackage):
patch('mfem-4.2-slepc.patch', when='@4.2.0+slepc')
patch('mfem-4.2-petsc-3.15.0.patch', when='@4.2.0+petsc ^petsc@3.15.0:')
patch('mfem-4.3-hypre-2.23.0.patch', when='@4.3.0')
+ patch('mfem-4.3-cusparse-11.4.patch', when='@4.3.0+cuda')
# Patch to fix MFEM makefile syntax error. See
# https://github.com/mfem/mfem/issues/1042 for the bug report and
diff --git a/var/spack/repos/builtin/packages/microsocks/package.py b/var/spack/repos/builtin/packages/microsocks/package.py
new file mode 100644
index 0000000000..16fd83dbc0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/microsocks/package.py
@@ -0,0 +1,31 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Microsocks(MakefilePackage):
+ """Microsocks is a multithreaded, small, efficient SOCKS5 server.
+ It is a SOCKS5 service that you can run on your remote boxes to
+ tunnel connections through them, if for some reason SSH doesn't
+ cut it for you."""
+
+ homepage = "https://github.com/rofl0r/microsocks"
+ url = "https://github.com/rofl0r/microsocks/archive/refs/tags/v1.0.2.tar.gz"
+ git = "https://github.com/rofl0r/microsocks.git"
+
+ maintainers = ["jcpunk"]
+
+ version("develop", branch="master")
+ version("1.0.2", sha256="5ece77c283e71f73b9530da46302fdb4f72a0ae139aa734c07fe532407a6211a")
+
+ def flag_handler(self, name, flags):
+ if name == "cflags":
+ flags.append(self.compiler.c99_flag)
+ return (flags, None, None)
+
+ @property
+ def install_targets(self):
+ return ["prefix={0}".format(self.prefix), "install"]
diff --git a/var/spack/repos/builtin/packages/modylas/gcc_format.patch b/var/spack/repos/builtin/packages/modylas/gcc_format.patch
index 086c4275aa..086c4275aa 100755..100644
--- a/var/spack/repos/builtin/packages/modylas/gcc_format.patch
+++ b/var/spack/repos/builtin/packages/modylas/gcc_format.patch
diff --git a/var/spack/repos/builtin/packages/modylas/makefile.patch b/var/spack/repos/builtin/packages/modylas/makefile.patch
index b308306ff5..b308306ff5 100755..100644
--- a/var/spack/repos/builtin/packages/modylas/makefile.patch
+++ b/var/spack/repos/builtin/packages/modylas/makefile.patch
diff --git a/var/spack/repos/builtin/packages/mpibind/package.py b/var/spack/repos/builtin/packages/mpibind/package.py
index d1bb6a77e9..86c0c79e6f 100644
--- a/var/spack/repos/builtin/packages/mpibind/package.py
+++ b/var/spack/repos/builtin/packages/mpibind/package.py
@@ -13,39 +13,48 @@ class Mpibind(AutotoolsPackage):
to heterogeneous architectures"""
homepage = "https://github.com/LLNL/mpibind"
- url = "https://github.com/LLNL/mpibind/archive/refs/tags/v0.5.0.tar.gz"
git = "https://github.com/LLNL/mpibind.git"
maintainers = ['eleon']
- # The build process uses 'git describe --tags' to get the
- # package version, thus we need 'get_full_repo'
- version('master', branch='master', get_full_repo=True)
- version('0.7.0', sha256='33077e7eb50322d2bcfe87bb3ea9159c2e49f6f045cbbcd2e69e763c3bec4330')
- version('0.5.0', sha256='51bb27341109aeef121a8630bd56f5551c70ebfd337a459fb70ef9015d97d2b7')
+ # This package uses 'git describe --tags' to get the
+ # package version in Autotools' AC_INIT, thus
+ # 'get_full_repo' is needed.
+ # Furthermore, the package can't be cached because
+ # AC_INIT would be missing the version argument,
+ # which is derived with git.
+ version('master', branch='master', get_full_repo=True)
+ version('0.8.0', commit='ff38b9d', no_cache=True)
+ version('0.7.0', commit='3c437a9', no_cache=True)
+ version('0.5.0', commit='8698f07', no_cache=True)
- variant('cuda', default=False,
+ variant('cuda', default=False,
description='Build w/support for NVIDIA GPUs.')
- variant('rocm', default=False,
+ variant('rocm', default=False,
description='Build w/support for AMD GPUs.')
- variant('flux', default=False,
+ variant('flux', default=False,
description='Build the Flux plugin.')
+ variant('python', default=False,
+ description='Build the Python bindings.')
- depends_on('autoconf', type='build')
- depends_on('automake', type='build')
- depends_on('libtool', type='build')
- depends_on('m4', type='build')
+ depends_on('autoconf', type='build')
+ depends_on('automake', type='build')
+ depends_on('libtool', type='build')
+ depends_on('m4', type='build')
depends_on('pkgconfig', type='build')
- depends_on('hwloc@2:+libxml2', type='link')
- depends_on('hwloc@2:+pci', when=(sys.platform != 'darwin'), type='link')
- depends_on('hwloc@2:+cuda+nvml', when='+cuda', type='link')
- depends_on('hwloc@2.4:+rocm+opencl', when='+rocm', type='link')
+ depends_on('hwloc@2:+libxml2', type='link')
+ depends_on('hwloc@2:+cuda+nvml', type='link', when='+cuda')
+ depends_on('hwloc@2.4:+rocm+opencl', type='link', when='+rocm')
+ depends_on('hwloc@2:+pci', type='link',
+ when=(sys.platform != 'darwin'))
- # Requiring @master temporarily while Flux adds
- # FLUX_SHELL_RC_PATH to a stable version (>0.29.0).
- # mpibind will require at least such version.
- depends_on('flux-core@master', when='+flux', type='link')
+ # flux-core >= 0.30.0 supports FLUX_SHELL_RC_PATH,
+ # which is needed to load the plugin into Flux
+ depends_on('flux-core@0.30:', when='+flux', type='link')
+
+ depends_on('python@3:', when='+python', type=('build', 'run'))
+ depends_on('py-cffi', when='+python', type=('build', 'run'))
def autoreconf(self, spec, prefix):
autoreconf('--install', '--verbose', '--force')
@@ -56,6 +65,8 @@ class Mpibind(AutotoolsPackage):
env.prepend_path('FLUX_SHELL_RC_PATH',
join_path(self.prefix, 'share', 'mpibind'))
- # To build and run the tests, make sure 'libtap' is installed
- # on the target system and is recognized by pkg-config.
- # Unfortunately, libtap is not in Spack.
+ # To build and run the C unit tests, make sure 'libtap'
+ # is installed and recognized by pkgconfig.
+ # To build and run the Python unit tests, make sure 'pycotap'
+ # is installed in your Python environment.
+ # Unfortunately, 'tap' and 'pycotap' are not in Spack.
diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py
index cae3ea70af..16d2035959 100644
--- a/var/spack/repos/builtin/packages/mpich/package.py
+++ b/var/spack/repos/builtin/packages/mpich/package.py
@@ -81,6 +81,15 @@ spack package at this time.''',
description='Enable Argobots support')
variant('fortran', default=True, description='Enable Fortran support')
+ variant(
+ 'two_level_namespace',
+ default=False,
+ description='''Build shared libraries and programs
+built with the mpicc/mpifort/etc. compiler wrappers
+with '-Wl,-commons,use_dylibs' and without
+'-Wl,-flat_namespace'.'''
+ )
+
provides('mpi@:3.1')
provides('mpi@:3.0', when='@:3.1')
provides('mpi@:2.2', when='@:1.2')
@@ -475,6 +484,9 @@ spack package at this time.''',
config_args.append('--with-thread-package=argobots')
config_args.append('--with-argobots=' + spec['argobots'].prefix)
+ if '+two_level_namespace' in spec:
+ config_args.append('--enable-two-level-namespace')
+
return config_args
@run_after('install')
diff --git a/var/spack/repos/builtin/packages/mpitrampoline/package.py b/var/spack/repos/builtin/packages/mpitrampoline/package.py
index 374396432b..3071c64f78 100644
--- a/var/spack/repos/builtin/packages/mpitrampoline/package.py
+++ b/var/spack/repos/builtin/packages/mpitrampoline/package.py
@@ -17,6 +17,12 @@ class Mpitrampoline(CMakePackage):
maintainers = ['eschnett']
version('develop', branch='main')
+ version('2.8.0', sha256='bc2a075ced19e5f7ea547060e284887bdbb0761d34d1adb6f16d2e9e096a7d38')
+ version('2.7.0', sha256='b188657e41b240bba663ce5b3d7b73377a27a64edcc1e0aaa7c924cf00e30b42')
+ version('2.6.0', sha256='5425085f4b8772990b28a643b7dfc7ac37a399ee35ffa3d6113a06e5b508dfac')
+ version('2.5.0', sha256='26423749a6a45324062cbe82eb6934236b0c8ea17f9d5b594ed0c15ea8d0dbad')
+ version('2.4.0', sha256='e08785cf5b43c9913d890be44f6e7a551a83f34f389f6db9136db2379697fadd')
+ version('2.3.0', sha256='4559acb13d34b9a052752a9e0f928d31da54bfa7b05f31585bf6a66fadaceca4')
version('2.2.0', sha256='fa213a7ac03b4c54d5c9281192fb604747d4b5be4ce9b54b4c740f3da7a6aaea')
version('2.1.0', sha256='8794c07772ecc6d979ecf475653ae571a593d01ef2df51ccbc63c9f9d9c67856')
version('2.0.0', sha256='50d4483f73ea4a79a9b6d025d3abba42f76809cba3165367f4810fb8798264b6')
diff --git a/var/spack/repos/builtin/packages/mt-metis/non_x8664.patch b/var/spack/repos/builtin/packages/mt-metis/non_x8664.patch
index 8eb305e5c2..8eb305e5c2 100755..100644
--- a/var/spack/repos/builtin/packages/mt-metis/non_x8664.patch
+++ b/var/spack/repos/builtin/packages/mt-metis/non_x8664.patch
diff --git a/var/spack/repos/builtin/packages/mujoco/package.py b/var/spack/repos/builtin/packages/mujoco/package.py
new file mode 100644
index 0000000000..a29772b1f1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mujoco/package.py
@@ -0,0 +1,49 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import platform
+
+from spack import *
+
+
+class Mujoco(Package):
+ """MuJoCo is a physics engine that aims to facilitate research and
+ development in robotics, biomechanics, graphics and animation, and
+ other areas where fast and accurate simulation is needed. """
+
+ homepage = "https://mujoco.org/"
+
+ mujoco_releases = {
+ '2.1.0': {
+ 'Linux-x86_64': 'a436ca2f4144c38b837205635bbd60ffe1162d5b44c87df22232795978d7d012',
+ 'Darwin-x86_64': '50226f859d9d3742fa57e1a0a92d656197ec5786f75bfa50ae00eb80fae25e90',
+ }
+ }
+
+ for ver, packages in mujoco_releases.items():
+ key = "{0}-{1}".format(platform.system(), platform.machine())
+ pkg_sha256 = packages.get(key)
+ if pkg_sha256:
+ version(ver, sha256=pkg_sha256)
+
+ def url_for_version(self, version):
+
+ url = "https://mujoco.org/download/mujoco{0}-{1}-x86_64.tar.gz"
+
+ system_map = {
+ 'Linux': 'linux',
+ 'Darwin': 'macos',
+ }
+
+ return url.format(version.joined, system_map[platform.system()])
+
+ def install(self, spec, prefix):
+ copy_tree('.', prefix)
+
+ def setup_run_environment(self, env):
+ env.prepend_path('CPATH', prefix.include)
+ env.prepend_path('LD_LIBRARY_PATH', prefix.bin)
+ if platform.system() == 'Darwin':
+ env.prepend_path('DYLD_LIBRARY_PATH', prefix.bin)
diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py
index 9b9346785e..957a4ba9dc 100644
--- a/var/spack/repos/builtin/packages/mumps/package.py
+++ b/var/spack/repos/builtin/packages/mumps/package.py
@@ -182,11 +182,14 @@ class Mumps(Package):
optf.append('-qfixed')
# As of version 5.2.0, MUMPS is able to take advantage
- # of the GEMMT BLAS extension. MKL is currently the only
+ # of the GEMMT BLAS extension. MKL and amdblis are the only
# known BLAS implementation supported.
if '@5.2.0: ^mkl' in self.spec:
optf.append('-DGEMMT_AVAILABLE')
+ if '@5.2.0: ^amdblis@3.0:' in self.spec:
+ optf.append('-DGEMMT_AVAILABLE')
+
if '+openmp' in self.spec:
optc.append(self.compiler.openmp_flag)
optf.append(self.compiler.openmp_flag)
@@ -360,7 +363,7 @@ class Mumps(Package):
@property
def libs(self):
- component_libs = ['*mumps*', 'pord']
+ component_libs = ['*mumps', 'mumps_common', 'pord']
return find_libraries(['lib' + comp for comp in component_libs],
root=self.prefix.lib,
shared=('+shared' in self.spec),
diff --git a/var/spack/repos/builtin/packages/muparserx/package.py b/var/spack/repos/builtin/packages/muparserx/package.py
new file mode 100644
index 0000000000..77997d7842
--- /dev/null
+++ b/var/spack/repos/builtin/packages/muparserx/package.py
@@ -0,0 +1,15 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Muparserx(CMakePackage):
+ """A C++ Library for Parsing Expressions with Strings, Complex
+ Numbers, Vectors, Matrices and more. """
+
+ homepage = "https://beltoforion.de/en/muparserx/"
+ url = "https://github.com/beltoforion/muparserx/archive/refs/tags/v4.0.8.tar.gz"
+
+ version('4.0.8', sha256='5913e0a4ca29a097baad1b78a4674963bc7a06e39ff63df3c73fbad6fadb34e1')
diff --git a/var/spack/repos/builtin/packages/mvapich2-gdr/package.py b/var/spack/repos/builtin/packages/mvapich2-gdr/package.py
index 5b5820641c..5b5820641c 100755..100644
--- a/var/spack/repos/builtin/packages/mvapich2-gdr/package.py
+++ b/var/spack/repos/builtin/packages/mvapich2-gdr/package.py
diff --git a/var/spack/repos/builtin/packages/mvapich2x/package.py b/var/spack/repos/builtin/packages/mvapich2x/package.py
index 3a75e3876e..3a75e3876e 100755..100644
--- a/var/spack/repos/builtin/packages/mvapich2x/package.py
+++ b/var/spack/repos/builtin/packages/mvapich2x/package.py
diff --git a/var/spack/repos/builtin/packages/nalu-wind/package.py b/var/spack/repos/builtin/packages/nalu-wind/package.py
index 5739579dcd..a1edc6fce6 100644
--- a/var/spack/repos/builtin/packages/nalu-wind/package.py
+++ b/var/spack/repos/builtin/packages/nalu-wind/package.py
@@ -52,16 +52,16 @@ class NaluWind(CMakePackage, CudaPackage):
depends_on('mpi')
depends_on('yaml-cpp@0.5.3:')
- depends_on('trilinos@master,develop ~cuda~wrapper+exodus+tpetra+muelu+belos+ifpack2+amesos2+zoltan+stk+boost~superlu-dist~superlu+hdf5+shards~hypre cxxstd=14', when='~cuda')
+ depends_on('trilinos@master,develop ~cuda~wrapper+exodus+tpetra+muelu+belos+ifpack2+amesos2+zoltan+stk+boost~superlu-dist~superlu+hdf5+shards~hypre', when='~cuda')
# Cannot build Trilinos as a shared library with STK on Darwin
# https://github.com/trilinos/Trilinos/issues/2994
- depends_on('trilinos@master,develop ~cuda~wrapper+exodus+tpetra+muelu+belos+ifpack2+amesos2+zoltan+stk+boost~superlu-dist~superlu+hdf5+shards~hypre~shared cxxstd=14', when=(sys.platform == 'darwin'))
+ depends_on('trilinos@master,develop ~cuda~wrapper+exodus+tpetra+muelu+belos+ifpack2+amesos2+zoltan+stk+boost~superlu-dist~superlu+hdf5+shards~hypre~shared', when=(sys.platform == 'darwin'))
depends_on('openfast@2.6.0 +cxx', when='+openfast')
depends_on('tioga@master,develop', when='+tioga')
depends_on('hypre@develop,2.18.2: ~int64+mpi~superlu-dist', when='+hypre')
depends_on('kokkos-nvcc-wrapper', type='build', when='+cuda')
for _arch in CudaPackage.cuda_arch_values:
- depends_on('trilinos@master,develop ~shared+exodus+tpetra+muelu+belos+ifpack2+amesos2+zoltan+stk+boost~superlu-dist~superlu+hdf5+shards~hypre+cuda+cuda_rdc+wrapper cxxstd=14 cuda_arch={0}'.format(_arch),
+ depends_on('trilinos@master,develop ~shared+exodus+tpetra+muelu+belos+ifpack2+amesos2+zoltan+stk+boost~superlu-dist~superlu+hdf5+shards~hypre+cuda+cuda_rdc+wrapper cuda_arch={0}'.format(_arch),
when='+cuda cuda_arch={0}'.format(_arch))
depends_on('hypre@develop +mpi+cuda~int64~superlu-dist cuda_arch={0}'.format(_arch),
when='+hypre+cuda cuda_arch={0}'.format(_arch))
diff --git a/var/spack/repos/builtin/packages/nalu/package.py b/var/spack/repos/builtin/packages/nalu/package.py
index f6973b8610..7b02767f43 100644
--- a/var/spack/repos/builtin/packages/nalu/package.py
+++ b/var/spack/repos/builtin/packages/nalu/package.py
@@ -35,7 +35,7 @@ class Nalu(CMakePackage):
# Cannot build Trilinos as a shared library with STK on Darwin
# which is why we have a 'shared' variant for Nalu
# https://github.com/trilinos/Trilinos/issues/2994
- depends_on('trilinos+mpi+exodus+tpetra+muelu+belos+ifpack2+amesos2+zoltan+stk+boost~superlu-dist+superlu+hdf5+zlib+shards~hypre@master')
+ depends_on('trilinos+mpi+exodus+tpetra+muelu+belos+ifpack2+amesos2+zoltan+stk+boost~superlu-dist+superlu+hdf5+shards~hypre@master')
depends_on('trilinos~shared', when='~shared')
# Optional dependencies
depends_on('tioga', when='+tioga+shared')
diff --git a/var/spack/repos/builtin/packages/namd/package.py b/var/spack/repos/builtin/packages/namd/package.py
index afd20ec847..9e930b2459 100644
--- a/var/spack/repos/builtin/packages/namd/package.py
+++ b/var/spack/repos/builtin/packages/namd/package.py
@@ -13,7 +13,7 @@ from spack import *
class Namd(MakefilePackage, CudaPackage):
- """NAMDis a parallel molecular dynamics code designed for
+ """NAMD is a parallel molecular dynamics code designed for
high-performance simulation of large biomolecular systems."""
homepage = "https://www.ks.uiuc.edu/Research/namd/"
diff --git a/var/spack/repos/builtin/packages/nccl/package.py b/var/spack/repos/builtin/packages/nccl/package.py
index ab578f0fd2..88bc6d2c46 100644
--- a/var/spack/repos/builtin/packages/nccl/package.py
+++ b/var/spack/repos/builtin/packages/nccl/package.py
@@ -14,6 +14,8 @@ class Nccl(MakefilePackage, CudaPackage):
maintainers = ['adamjstewart']
+ version('2.11.4-1', sha256='db4e9a0277a64f9a31ea9b5eea22e63f10faaed36dded4587bbc8a0d8eceed10')
+ version('2.10.3-1', sha256='55de166eb7dcab9ecef2629cdb5fb0c5ebec4fae03589c469ebe5dcb5716b3c5')
version('2.9.9-1', sha256='01629a1bdadbadb2828e26023ba7685bbc07678468cb7df63cc96460f5337e08')
version('2.9.8-1', sha256='f6e5d9c10e6e54ee21f9707d2df684083d0cccf87bd5a4dbc795803da2bc9f5a')
version('2.9.6-1', sha256='c4b1f5a88f03c0ac8f1dcbe27723cd75cfe051754078d83629efaaed10ce8731')
diff --git a/var/spack/repos/builtin/packages/ncio/package.py b/var/spack/repos/builtin/packages/ncio/package.py
index 9374b43e84..3c66401b99 100644
--- a/var/spack/repos/builtin/packages/ncio/package.py
+++ b/var/spack/repos/builtin/packages/ncio/package.py
@@ -21,3 +21,9 @@ class Ncio(CMakePackage):
depends_on('mpi')
depends_on('netcdf-fortran')
+
+ def setup_run_environment(self, env):
+ lib = find_libraries('libncio', root=self.prefix, shared=False, recursive=True)
+ env.set('NCIO_LIB', lib[0])
+ env.set('NCIO_INC', join_path(self.prefix, 'include'))
+ env.set('NCIO_LIBDIR', lib[0])
diff --git a/var/spack/repos/builtin/packages/ncl/package.py b/var/spack/repos/builtin/packages/ncl/package.py
index a4bd503d07..c4d7659cd4 100644
--- a/var/spack/repos/builtin/packages/ncl/package.py
+++ b/var/spack/repos/builtin/packages/ncl/package.py
@@ -56,6 +56,7 @@ class Ncl(Package):
depends_on('flex+lex')
depends_on('iconv')
depends_on('tcsh')
+ depends_on('makedepend', type='build')
# Also, the manual says that ncl requires zlib, but that comes as a
# mandatory dependency of libpng, which is a mandatory dependency of cairo.
@@ -71,6 +72,7 @@ class Ncl(Package):
depends_on('bzip2')
depends_on('freetype')
depends_on('fontconfig')
+ depends_on('zstd')
# In Spack, we do not have an option to compile netcdf-c without netcdf-4
# support, so we will tell the ncl configuration script that we want
@@ -160,8 +162,8 @@ class Ncl(Package):
f.writelines([
'#define HdfDefines\n',
'#define CppCommand \'/usr/bin/env cpp -traditional\'\n',
- '#define CCompiler cc\n',
- '#define FCompiler fc\n',
+ '#define CCompiler {0}\n'.format(spack_cc),
+ '#define FCompiler {0}\n'.format(spack_fc),
('#define CtoFLibraries ' + ' '.join(c2f_flags) + '\n'
if len(c2f_flags) > 0
else ''),
diff --git a/var/spack/repos/builtin/packages/ncurses/package.py b/var/spack/repos/builtin/packages/ncurses/package.py
index e9b469647e..08b1bae47e 100644
--- a/var/spack/repos/builtin/packages/ncurses/package.py
+++ b/var/spack/repos/builtin/packages/ncurses/package.py
@@ -20,7 +20,7 @@ class Ncurses(AutotoolsPackage, GNUMirrorPackage):
# URL must remain http:// so Spack can bootstrap curl
gnu_mirror_path = "ncurses/ncurses-6.1.tar.gz"
- executables = [r'^ncursesw?\d*-config$']
+ executables = [r'^ncursesw?(?:\d+(?:\.\d+)*)?-config$']
version('6.2', sha256='30306e0c76e0f9f1f0de987cf1c82a5c21e1ce6568b9227f7da5b71cbea86c9d')
version('6.1', sha256='aa057eeeb4a14d470101eff4597d5833dcef5965331be3528c08d99cebaa0d17')
@@ -101,7 +101,8 @@ class Ncurses(AutotoolsPackage, GNUMirrorPackage):
'--enable-overwrite',
'--without-ada',
'--enable-pc-files',
- '--with-pkg-config-libdir={0}/lib/pkgconfig'.format(self.prefix)
+ '--with-pkg-config-libdir={0}/lib/pkgconfig'.format(self.prefix),
+ '--disable-overwrite'
]
nwide_opts = ['--disable-widec',
@@ -148,14 +149,11 @@ class Ncurses(AutotoolsPackage, GNUMirrorPackage):
with working_dir('build_ncursesw'):
make('install')
- # fix for packages like hstr that use "#include <ncurses/ncurses.h>"
- headers = glob.glob(os.path.join(prefix.include, '*'))
- for p_dir in ['ncurses', 'ncursesw']:
- path = os.path.join(prefix.include, p_dir)
- if not os.path.exists(path):
- os.makedirs(path)
- for header in headers:
- install(header, path)
+ # fix for packages that use "#include <ncurses.h>" (use wide by default)
+ headers = glob.glob(os.path.join(prefix.include, 'ncursesw', '*.h'))
+ for header in headers:
+ h = os.path.basename(header)
+ os.symlink(os.path.join('ncursesw', h), os.path.join(prefix.include, h))
@property
def libs(self):
diff --git a/var/spack/repos/builtin/packages/nextflow/package.py b/var/spack/repos/builtin/packages/nextflow/package.py
index a3ca409c8f..cf62abcc34 100644
--- a/var/spack/repos/builtin/packages/nextflow/package.py
+++ b/var/spack/repos/builtin/packages/nextflow/package.py
@@ -14,6 +14,8 @@ class Nextflow(Package):
maintainers = ['dialvarezs']
+ version('21.10.1', sha256='05c8b9f3d2f5eded737fdd0a13b84e3bc442cc6355ba95e21118cb624f8176da', expand=False)
+ version('21.10.0', sha256='e938e53f43f0f00c8d5adf2dc104c4ce0c6d834aa84a4a3918ac8bec6eee6b9c', expand=False)
version('21.04.3', sha256='80c7ecd94b55da8eb0e17040dbd0c43ee80e252cd999374e16c00d54d3d3abf3', expand=False)
version('20.10.0', sha256='54f76c83cbabe8ec68d6a878dcf921e647284499f4ae917356e594d873cb78dd', expand=False)
version('20.07.1', sha256='de4db5747a801af645d9b021c7b36f4a25c3ce1a8fda7705a5f37e8f9357443a', expand=False)
diff --git a/var/spack/repos/builtin/packages/nnvm/cmake2.patch b/var/spack/repos/builtin/packages/nnvm/cmake2.patch
index 5536ca718f..5536ca718f 100755..100644
--- a/var/spack/repos/builtin/packages/nnvm/cmake2.patch
+++ b/var/spack/repos/builtin/packages/nnvm/cmake2.patch
diff --git a/var/spack/repos/builtin/packages/nsimd/package.py b/var/spack/repos/builtin/packages/nsimd/package.py
index 9e2f8e5ef6..3a18326cc9 100644
--- a/var/spack/repos/builtin/packages/nsimd/package.py
+++ b/var/spack/repos/builtin/packages/nsimd/package.py
@@ -66,6 +66,7 @@ class Nsimd(CMakePackage):
depends_on('cmake@2.8.7:', type='build')
depends_on('cmake@3.0.2:', type='build', when='@2:')
depends_on('python@3:', type='build')
+ depends_on('py-chardet', type='build', when='@3:')
depends_on('py-requests', type='build', when='@3:')
# Add a 'generate_code' phase in the beginning
diff --git a/var/spack/repos/builtin/packages/nspr/package.py b/var/spack/repos/builtin/packages/nspr/package.py
index 33b58f3b16..f8c6cc0f90 100644
--- a/var/spack/repos/builtin/packages/nspr/package.py
+++ b/var/spack/repos/builtin/packages/nspr/package.py
@@ -13,6 +13,7 @@ class Nspr(AutotoolsPackage):
homepage = "https://developer.mozilla.org/en-US/docs/Mozilla/Projects/NSS/Reference/NSPR_functions"
url = "https://ftp.mozilla.org/pub/nspr/releases/v4.13.1/src/nspr-4.13.1.tar.gz"
+ version('4.32', sha256='bb6bf4f534b9559cf123dcdc6f9cd8167de950314a90a88b2a329c16836e7f6c')
version('4.31', sha256='5729da87d5fbf1584b72840751e0c6f329b5d541850cacd1b61652c95015abc8')
version('4.13.1', sha256='5e4c1751339a76e7c772c0c04747488d7f8c98980b434dc846977e43117833ab')
diff --git a/var/spack/repos/builtin/packages/nss/package.py b/var/spack/repos/builtin/packages/nss/package.py
index 42292e1ed2..3f74c53b0c 100644
--- a/var/spack/repos/builtin/packages/nss/package.py
+++ b/var/spack/repos/builtin/packages/nss/package.py
@@ -16,7 +16,9 @@ class Nss(MakefilePackage):
homepage = "https://developer.mozilla.org/en-US/docs/Mozilla/Projects/NSS"
url = "https://ftp.mozilla.org/pub/security/nss/releases/NSS_3_67_RTM/src/nss-3.67.tar.gz"
- version('3.67', sha256='f6549a9148cd27b394b40c77fa73111d5ea23cdb51d796665de1b7458f88ce7f')
+ version('3.73', sha256='566d3a68da9b10d7da9ef84eb4fe182f8f04e20d85c55d1bf360bb2c0096d8e5')
+ # Everything before 3.73 is vulnerable (CVE-2021-43527)
+ version('3.67', sha256='f6549a9148cd27b394b40c77fa73111d5ea23cdb51d796665de1b7458f88ce7f', deprecated=True)
depends_on('nspr@4.24:')
depends_on('sqlite')
@@ -26,6 +28,11 @@ class Nss(MakefilePackage):
build_directory = 'nss'
+ def url_for_version(self, version):
+ url = 'https://ftp.mozilla.org/pub/security/nss/releases/NSS_{0}_RTM/src/nss-{1}.tar.gz'
+
+ return url.format(version.underscored, version)
+
@property
def build_targets(self):
# We cannot use nss_build_all because this will try to build nspr.
diff --git a/var/spack/repos/builtin/packages/ntl/package.py b/var/spack/repos/builtin/packages/ntl/package.py
new file mode 100644
index 0000000000..7313e1884d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ntl/package.py
@@ -0,0 +1,62 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Ntl(Package):
+ """
+ NTL -- a library for doing number theory
+
+ NTL is open-source software distributed under the terms of the GNU Lesser
+ General Public License (LGPL) version 2.1 or later. See the file
+ doc/copying.txt for complete details on the licensing of NTL.
+
+ Documentation is available in the file doc/tour.html, which can be viewed
+ with a web browser.
+
+ """
+
+ homepage = "https://libntl.org"
+ url = "https://github.com/libntl/ntl/archive/refs/tags/v11.5.1.tar.gz"
+
+ maintainers = ['wohlbier']
+
+ version('11.5.1', sha256='ef578fa8b6c0c64edd1183c4c303b534468b58dd3eb8df8c9a5633f984888de5')
+ version('11.5.0', sha256='9e1e6488b177c3e5d772fdd6279c890937a9d1c3b694a904ac1cfbe9cab836db')
+ version('11.4.4', sha256='2ce7a10fadbed6c3859d72c859612a4ca0dbdf6a9db99db4261422b7f0804bfa')
+
+ variant('shared', default=False, description='Build shared library.')
+
+ depends_on('gmp')
+
+ phases = ['configure', 'build', 'install']
+
+ def configure_args(self):
+ spec = self.spec
+ prefix = self.prefix
+
+ config_args = [
+ 'CXX={0}'.format(self.compiler.cxx),
+ 'DEF_PREFIX={0}'.format(prefix),
+ 'GMP_PREFIX={0}'.format(spec['gmp'].prefix) # gmp dependency
+ ]
+ if '+shared' in spec:
+ config_args.append('SHARED=on')
+
+ return config_args
+
+ def configure(self, spec, prefix):
+ with working_dir('src'):
+ configure = Executable('./configure')
+ configure(*self.configure_args())
+
+ def build(self, spec, prefix):
+ with working_dir('src'):
+ make()
+
+ def install(self, spec, prefix):
+ with working_dir('src'):
+ make('install')
diff --git a/var/spack/repos/builtin/packages/nvhpc/package.py b/var/spack/repos/builtin/packages/nvhpc/package.py
index dc5876f7eb..df715a9bce 100644
--- a/var/spack/repos/builtin/packages/nvhpc/package.py
+++ b/var/spack/repos/builtin/packages/nvhpc/package.py
@@ -22,6 +22,10 @@ from spack.util.prefix import Prefix
# - package key must be in the form '{os}-{arch}' where 'os' is in the
# format returned by platform.system() and 'arch' by platform.machine()
_versions = {
+ '21.11': {
+ 'Linux-aarch64': ('3b11bcd9cca862fabfce1e7bcaa2050ea12130c7e897f4e7859ba4c155d20720', 'https://developer.download.nvidia.com/hpc-sdk/21.11/nvhpc_2021_2111_Linux_aarch64_cuda_multi.tar.gz'),
+ 'Linux-ppc64le': ('ac51ed92de4eb5e1bdb064ada5bbace5b89ac732ad6c6473778edfb8d29a6527', 'https://developer.download.nvidia.com/hpc-sdk/21.11/nvhpc_2021_2111_Linux_ppc64le_cuda_multi.tar.gz'),
+ 'Linux-x86_64': ('d8d8ccd0e558d22bcddd955f2233219c96f7de56aa8e09e7be833e384d32d6aa', 'https://developer.download.nvidia.com/hpc-sdk/21.11/nvhpc_2021_2111_Linux_x86_64_cuda_multi.tar.gz')},
'21.9': {
'Linux-aarch64': ('52c2c66e30043add4afccedf0ba77daa0000bf42e0db844baa630bb635b91a7d', 'https://developer.download.nvidia.com/hpc-sdk/21.9/nvhpc_2021_219_Linux_aarch64_cuda_multi.tar.gz'),
'Linux-ppc64le': ('cff0b55fb782be1982bfeec1d9763b674ddbf84ff2c16b364495299266320289', 'https://developer.download.nvidia.com/hpc-sdk/21.9/nvhpc_2021_219_Linux_ppc64le_cuda_multi.tar.gz'),
diff --git a/var/spack/repos/builtin/packages/nwchem/package.py b/var/spack/repos/builtin/packages/nwchem/package.py
index 1ce1846834..02a18c4270 100644
--- a/var/spack/repos/builtin/packages/nwchem/package.py
+++ b/var/spack/repos/builtin/packages/nwchem/package.py
@@ -24,10 +24,14 @@ class Nwchem(Package):
version('6.8.1', sha256='fd20f9ca1b410270a815e77e052ec23552f828526cd252709f798f589b2a6431',
url='https://github.com/nwchemgit/nwchem/releases/download/6.8.1-release/nwchem-6.8.1-release.revision-v6.8-133-ge032219-srconly.2018-06-14.tar.bz2')
+ variant('openmp', default=False, description='Enables OpenMP support')
+ variant('mpipr', default=False, description='Enables ARMCI with progress rank')
+
depends_on('blas')
depends_on('lapack')
depends_on('mpi')
depends_on('scalapack')
+ depends_on('fftw-api')
depends_on('python@3:', when='@7:', type=('build', 'link', 'run'))
depends_on('python@2.7:2.8', when='@:6', type=('build', 'link', 'run'))
conflicts('%gcc@10:', when='@:6', msg='NWChem versions prior to 7.0.0 do not build with GCC 10')
@@ -36,6 +40,7 @@ class Nwchem(Package):
scalapack = spec['scalapack'].libs
lapack = spec['lapack'].libs
blas = spec['blas'].libs
+ fftw = spec['fftw-api'].libs
# see https://nwchemgit.github.io/Compiling-NWChem.html
args = []
args.extend([
@@ -45,14 +50,19 @@ class Nwchem(Package):
'CC=%s' % os.path.basename(spack_cc),
'FC=%s' % os.path.basename(spack_fc),
'USE_MPI=y',
+ 'USE_BLAS=y',
+ 'USE_FFTW3=y',
'PYTHONVERSION=%s' % spec['python'].version.up_to(2),
'BLASOPT=%s' % ((lapack + blas).ld_flags),
'BLAS_LIB=%s' % blas.ld_flags,
'LAPACK_LIB=%s' % lapack.ld_flags,
'SCALAPACK_LIB=%s' % scalapack.ld_flags,
+ 'FFTW3_LIB=%s' % fftw.ld_flags,
+ 'FFTW3_INCLUDE={0}'.format(spec['fftw-api'].prefix.include),
'NWCHEM_MODULES=all python',
'NWCHEM_LONG_PATHS=Y', # by default NWCHEM_TOP is 64 char max
- 'USE_NOIO=Y' # skip I/O algorithms
+ 'USE_NOIO=Y', # skip I/O algorithms
+ 'USE_NOFSCHECK=TRUE' # disable FSCHECK, which caused problems like code crashes
])
if spec.version < Version('7.0.0'):
args.extend([
@@ -90,6 +100,12 @@ class Nwchem(Package):
args.extend(['NWCHEM_TARGET=%s' % target])
+ if '+openmp' in spec:
+ args.extend(['USE_OPENMP=y'])
+
+ if '+mpipr' in spec:
+ args.extend(['ARMCI_NETWORK=MPI-PR'])
+
with working_dir('src'):
make('nwchem_config', *args)
if use_32_bit_lin_alg:
diff --git a/var/spack/repos/builtin/packages/ocaml/fix-duplicate-defs.patch b/var/spack/repos/builtin/packages/ocaml/fix-duplicate-defs.patch
index bbbc283d6a..bbbc283d6a 100755..100644
--- a/var/spack/repos/builtin/packages/ocaml/fix-duplicate-defs.patch
+++ b/var/spack/repos/builtin/packages/ocaml/fix-duplicate-defs.patch
diff --git a/var/spack/repos/builtin/packages/octave/package.py b/var/spack/repos/builtin/packages/octave/package.py
index 172ec2af79..5b3885eedf 100644
--- a/var/spack/repos/builtin/packages/octave/package.py
+++ b/var/spack/repos/builtin/packages/octave/package.py
@@ -46,6 +46,7 @@ class Octave(AutotoolsPackage, GNUMirrorPackage):
# Variants
variant('readline', default=True)
+ variant('bz2', default=True)
variant('arpack', default=False)
variant('curl', default=False)
variant('fftw', default=False)
@@ -74,9 +75,11 @@ class Octave(AutotoolsPackage, GNUMirrorPackage):
depends_on('sed', when=sys.platform == 'darwin', type='build')
depends_on('pcre')
depends_on('pkgconfig', type='build')
+ depends_on('texinfo', type='build')
# Strongly recommended dependencies
depends_on('readline', when='+readline')
+ depends_on('bzip2', when='+bz2')
# Optional dependencies
depends_on('arpack-ng', when='+arpack')
@@ -181,6 +184,14 @@ class Octave(AutotoolsPackage, GNUMirrorPackage):
else:
config_args.append('--disable-readline')
+ if '+bz2' in spec:
+ config_args.extend([
+ "--with-bz2-includedir=%s" % spec['bzip2'].prefix.include,
+ "--with-bz2-libdir=%s" % spec['bzip2'].prefix.lib
+ ])
+ else:
+ config_args.append("--without-bz2")
+
# Optional dependencies
if '+arpack' in spec:
sa = spec['arpack-ng']
@@ -275,6 +286,8 @@ class Octave(AutotoolsPackage, GNUMirrorPackage):
else:
config_args.append("--without-qrupdate")
+ config_args += self.with_or_without("qscintilla")
+
if '+zlib' in spec:
config_args.extend([
"--with-z-includedir=%s" % spec['zlib'].prefix.include,
@@ -293,6 +306,9 @@ class Octave(AutotoolsPackage, GNUMirrorPackage):
if spec.satisfies('%fj'):
config_args.append('--enable-fortran-calling-convention=gfortran')
+ # Make sure we do not use qtchooser
+ config_args.append('ac_cv_prog_ac_ct_QTCHOOSER=')
+
return config_args
# ========================================================================
diff --git a/var/spack/repos/builtin/packages/oommf/package.py b/var/spack/repos/builtin/packages/oommf/package.py
new file mode 100644
index 0000000000..fa5de8b76d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/oommf/package.py
@@ -0,0 +1,295 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import os.path
+
+from spack import *
+from spack.util.executable import Executable
+
+
+class Oommf(Package):
+ """The Object Oriented MicroMagnetic Framework (OOMMF) is aimed at
+ developing portable, extensible public domain programs and tools for
+ micromagnetics.
+
+ The code forms a completely functional micromagnetics package, with
+ the additional capability to be extended by other programmers so that
+ people developing new code can build on the OOMMF foundation. OOMMF is
+ written in C++, a widely-available, object-oriented language that can
+ produce programs with good performance as well as extensibility. For
+ portable user interfaces, we make use of Tcl/Tk so that OOMMF operates
+ across a wide range of Unix, Windows, and Mac OS X platforms. The main
+ contributors to OOMMF are Mike Donahue and Don Porter.
+
+ Summary taken from OOMMF documentation https://math.nist.gov/oommf/
+
+ OOMMF home page: "https://math.nist.gov/oommf/"
+
+ OOMMF as a git repository: https://github.com/fangohr/oommf
+
+ Versions ending with "-vanilla" indicate that the sources are taken
+ directly from https://math.nist.gov/oommf/dist/ . All other versions are
+ from the https://github.com/fangohr/oommf (which includes the "-vanilla"
+ sources, and adds additional OOMMF extensions). See
+ https://github.com/fangohr/oommf for details.
+ """
+
+ homepage = "https://math.nist.gov/oommf/"
+ # default URL for versions
+ url = "https://github.com/fangohr/oommf/archive/refs/tags/20a1_20180930_ext.tar.gz"
+
+ maintainers = ["fangohr"]
+
+ version(
+ "20a3_20210930",
+ sha256="880242afdf4c84de7f2a3c42ab0ad8c354028a7d2d3c3160980cf3e08e285691",
+ )
+
+ version(
+ "20a3_20210930-vanilla",
+ url="https://math.nist.gov/oommf/dist/oommf20a3_20210930.tar.gz",
+ sha256="a2a24c1452e66baf37fea67edbcbfb78d60c65a78c6b032a18a1de9f8bbebc92",
+ )
+
+ version(
+ "20a2_20200608",
+ sha256="a3113f2aca0b6249ee99b2f4874f31de601bd7af12498d84f28706b265fa50ab",
+ )
+
+ version(
+ "20a1_20180930_ext",
+ sha256="18bf9bd713c7ee6ced6d561ce742d17e0588ae24ef2e56647a5c8a7853e07a4c",
+ )
+
+ version(
+ "20a2_20200608-vanilla",
+ sha256="5c349de6e698b0c2c5390aa0598ea3052169438cdcc7e298068bc03abb9761c8",
+ url="https://math.nist.gov/oommf/dist/oommf20a2_20200608-hotfix.tar.gz",
+ )
+
+ # Deprecated versions have never been tested with spack
+ version(
+ "20a2_20190930-vanilla",
+ sha256="53b41ef30f76766239a1071d13081d8d7604a2ea59187ca4abef356ad1be4986",
+ url="https://math.nist.gov/oommf/dist/oommf20a2_20190930.tar.gz",
+ deprecated=True,
+ )
+
+ version(
+ "20a1_20180930",
+ deprecated=True,
+ sha256="c871e0dbb1522c3c1314af6c084b90cdbe69fd869b55ac94443851b74f818ed2",
+ )
+
+ version(
+ "20a0_20170929a0",
+ deprecated=True,
+ sha256="3439d1c9e95cc7395bc2e2330bba8cf198585d1b350251ea8561c1554ff8c7fd",
+ url="https://github.com/fangohr/oommf/archive/refs/tags/2.0a0_20170929a0.tar.gz",
+ )
+
+ version(
+ "12b0_20160930",
+ deprecated=True,
+ sha256="363006f549bb63a39564fafc18b52342a14c1c3769c214467a39f72a0c0be36b",
+ url="https://github.com/fangohr/oommf/archive/refs/tags/1.2b0_20160930b1.tar.gz",
+ )
+
+ depends_on("tk", type=("build", "link", "test", "run"))
+ depends_on("tcl", type=("build", "test", "run"))
+ depends_on("xproto", type=("build"))
+
+ # Compilation with clang does not work yet (gcc works fine, nothing else tested)
+ # (https://github.com/spack/spack/pull/26933#pullrequestreview-789754233)
+ conflicts("%clang")
+
+ phases = ["configure", "build", "install"]
+
+ # sanity checks: (https://spack.readthedocs.io/en/latest/packaging_guide.html#checking-an-installation)
+ sanity_check_is_file = [join_path("bin", "oommf.tcl")]
+ sanity_check_is_dir = ["usr/bin/oommf/app", "usr/bin/oommf/app/oxs/examples"]
+
+ def get_oommf_source_root(self):
+ """If we download the source from NIST, then 'oommf.tcl' is in the root directory.
+ If we download from GitHub, then it is in 'oommf/oommf.tcl'.
+
+ Here, we try to find the relative path to that file, and return it.
+ """
+ if "oommf.tcl" in os.listdir():
+ print(
+ "Found 'oommf.tcl' in " + os.getcwd() + " (looks like source from NIST)"
+ )
+ return "."
+ elif "oommf.tcl" in os.listdir("oommf"):
+ print(
+ "Found 'oommf.tcl' in "
+ + os.getcwd()
+ + "/oommf "
+ + "(looks like source from Github)"
+ )
+ return "oommf"
+ else:
+ raise ValueError("Cannot find 'oommf.tcl' in " + os.getcwd())
+
+ def get_oommf_path(self, prefix):
+ """Given the prefix, return the full path of the OOMMF installation
+ below `prefix`."""
+
+ oommfdir = os.path.join(prefix.usr.bin, "oommf")
+ return oommfdir
+
+ @property
+ def oommf_tcl_path(self):
+ return join_path(self.spec.prefix.bin, "oommf.tcl")
+
+ @property
+ def tclsh(self):
+ return Executable(join_path(self.spec["tcl"].prefix.bin, "tclsh"))
+
+ @property
+ def test_env(self):
+ """Create environment in which post-install tests can be run."""
+ # Make sure the correct OOMMF config.tcl is found.
+ # This environment variable (OOMMF_ROOT) seems not to be
+ # set at this point, so we have to set it manually for the test:
+ oommfdir = self.get_oommf_path(self.prefix)
+ test_env_ = {"OOMMF_ROOT": oommfdir}
+ return test_env_
+
+ def configure(self, spec, prefix):
+ # change into directory with source code
+ with working_dir(self.get_oommf_source_root()):
+
+ configure = Executable("./oommf.tcl pimake distclean")
+ configure()
+ configure2 = Executable("./oommf.tcl pimake upgrade")
+ configure2()
+
+ def build(self, spec, prefix):
+ with working_dir(self.get_oommf_source_root()):
+ make = Executable("./oommf.tcl pimake ")
+ make()
+
+ def install(self, spec, prefix):
+ # keep a copy of all the tcl files and everything oommf created.
+ # in OOMMF terminology, this is OOMMF_ROOT
+ # We are now using prefix/usr/bin/oommf for that location
+ # - is there a better place?
+ oommfdir = self.get_oommf_path(prefix)
+
+ with working_dir(self.get_oommf_source_root()):
+
+ install_tree(".", oommfdir)
+
+ # The one file that is used directly by the users should be
+ # available as the binary for the user:
+ install_files = ["oommf.tcl"]
+ mkdirp(prefix.bin)
+ for f in install_files:
+ install(os.path.join(oommfdir, f), prefix.bin)
+
+ def setup_run_environment(self, env):
+ # Set OOMMF_ROOT so that oommf.tcl can find its files.
+ oommfdir = self.get_oommf_path(self.prefix)
+ env.set("OOMMF_ROOT", oommfdir)
+
+ # set OOMMFTCL so ubermag / oommf can find oommf
+ env.set("OOMMFTCL", join_path(oommfdir, "oommf.tcl"))
+
+ def _check_install_oommf_command(self, oommf_args):
+ "Given a list of arguments for oommf.tcl, execute those."
+ print("Testing oommf.tcl with arguments: " + str(oommf_args))
+
+ test_env = self.test_env
+ # the "+platform" test needs the following environment variable:
+ if oommf_args == ["+platform"]:
+ test_env["PATH"] = os.environ["PATH"]
+
+ output = self.tclsh(
+ self.oommf_tcl_path,
+ *oommf_args,
+ output=str.split,
+ error=str.split,
+ env=test_env
+ )
+
+ print("output received from oommf is %s" % output)
+
+ @run_after("install")
+ def check_install_version(self):
+ self._check_install_oommf_command(["+version"])
+
+ @run_after("install")
+ def check_install_platform(self):
+ self._check_install_oommf_command(["+platform"])
+
+ @run_after("install")
+ def check_install_stdprob3(self):
+ oommf_examples = join_path(self.spec.prefix.usr.bin, "oommf/app/oxs/examples")
+ task = join_path(oommf_examples, "stdprob3.mif")
+ self._check_install_oommf_command(["boxsi", "+fg", "-kill", "all", task])
+
+ def test(self):
+ """Run these smoke tests when requested explicitly"""
+
+ # run "oommf +version"
+ spec = self.spec
+ exe = join_path(spec["tcl"].prefix.bin, "tclsh")
+ oommf_tcl_path = join_path(spec.prefix.bin, "oommf.tcl")
+ options = [oommf_tcl_path, "+version"]
+ purpose = "Check oommf.tcl can execute (+version)"
+ expected = ["info:"]
+
+ self.run_test(
+ exe,
+ options=options,
+ expected=expected,
+ status=[0],
+ installed=False,
+ purpose=purpose,
+ skip_missing=False,
+ work_dir=None,
+ )
+
+ # run "oommf +platform"
+ options = [oommf_tcl_path, "+platform"]
+ purpose = "Check oommf.tcl can execute (+platform)"
+ expected = [
+ "OOMMF threads",
+ "OOMMF release",
+ "OOMMF API index",
+ "Temp file directory",
+ ]
+ self.run_test(
+ exe,
+ options=options,
+ expected=expected,
+ status=[0],
+ installed=False,
+ purpose=purpose,
+ skip_missing=False,
+ work_dir=None,
+ )
+
+ # run standard problem 3 with oommf (about 30 seconds runtime)
+ purpose = "Testing oommf.tcl standard problem 3"
+ print(purpose)
+
+ oommf_examples = join_path(spec.prefix.usr.bin, "oommf/app/oxs/examples")
+ task = join_path(oommf_examples, "stdprob3.mif")
+
+ options = [oommf_tcl_path, "boxsi", "+fg", task, "-kill", "all"]
+
+ expected = ['End "stdprob3.mif"', "Mesh geometry: 32 x 32 x 32 = 32 768 cells"]
+ self.run_test(
+ exe,
+ options=options,
+ expected=expected,
+ status=[0],
+ installed=False,
+ purpose=purpose,
+ skip_missing=False,
+ work_dir=None,
+ )
diff --git a/var/spack/repos/builtin/packages/opa-psm2/package.py b/var/spack/repos/builtin/packages/opa-psm2/package.py
index 5a9044bbd0..3eb90f6168 100644
--- a/var/spack/repos/builtin/packages/opa-psm2/package.py
+++ b/var/spack/repos/builtin/packages/opa-psm2/package.py
@@ -12,6 +12,7 @@ class OpaPsm2(MakefilePackage):
homepage = "https://github.com/cornelisnetworks/opa-psm2"
url = "https://github.com/cornelisnetworks/opa-psm2/archive/PSM2_10.3-8.tar.gz"
+ version('11.2.206', sha256='08aa41f41bdb485ee037d3f7e32dd45e79858ce38e744d33b9db2af60e3c627a')
version('11.2.185', sha256='8c0446e989feb4a3822791e4a3687060916f7c4612d1e8e493879be66f10db09')
version('11.2.77', sha256='5cc33d1e19d871a5861efe0bb897526f404b4bf2b88ac58bb277db96ac5ecb54')
version('11.2.68', sha256='42e16a14fc8c90b50855dcea46af3315bee32fb1ae89d83060f9b2ebdce1ec26')
diff --git a/var/spack/repos/builtin/packages/open-iscsi/package.py b/var/spack/repos/builtin/packages/open-iscsi/package.py
index 89c902072e..29c30135bc 100644
--- a/var/spack/repos/builtin/packages/open-iscsi/package.py
+++ b/var/spack/repos/builtin/packages/open-iscsi/package.py
@@ -21,7 +21,7 @@ class OpenIscsi(MakefilePackage):
depends_on('gettext')
depends_on('uuid')
- depends_on('util-linux+libmount')
+ depends_on('util-linux')
depends_on('kmod')
depends_on('open-isns')
depends_on('libtool', type='build')
diff --git a/var/spack/repos/builtin/packages/open3d/package.py b/var/spack/repos/builtin/packages/open3d/package.py
new file mode 100644
index 0000000000..468f89d869
--- /dev/null
+++ b/var/spack/repos/builtin/packages/open3d/package.py
@@ -0,0 +1,113 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import os
+
+from spack import *
+
+
+class Open3d(CMakePackage, CudaPackage):
+ """Open3D: A Modern Library for 3D Data Processing."""
+
+ homepage = "http://www.open3d.org/"
+ url = "https://github.com/isl-org/Open3D/archive/refs/tags/v0.13.0.tar.gz"
+ git = "https://github.com/isl-org/Open3D.git"
+
+ version('0.13.0', tag='v0.13.0', submodules=True)
+
+ variant('python', default=False, description='Build the Python module')
+
+ # http://www.open3d.org/docs/latest/compilation.html
+
+ depends_on('cmake@3.19:', type='build')
+ # depends_on('eigen')
+ # depends_on('flann')
+ # depends_on('fmt')
+ # depends_on('glew')
+ # depends_on('glfw')
+ # depends_on('imgui')
+ # depends_on('jpeg')
+ # depends_on('liblzf')
+ # depends_on('libpng')
+ # depends_on('py-pybind11')
+ # depends_on('qhull')
+ # depends_on('tinygltf')
+ # depends_on('tinyobjloader')
+
+ extends('python', when='+python', type=('build', 'link', 'run'))
+ depends_on('python@3.6:', when='+python', type=('build', 'link', 'run'))
+ depends_on('py-pip', when='+python', type='build')
+ depends_on('py-setuptools@40.8:', when='+python', type='build')
+ depends_on('py-wheel@0.36:', when='+python', type='build')
+ depends_on('py-numpy@1.18:', when='+python', type=('build', 'run'))
+ depends_on('py-pytest', when='+python', type='test')
+ depends_on('cuda@10.1:', when='+cuda')
+
+ # C++14 compiler required
+ conflicts('%gcc@:4')
+ conflicts('%clang@:6')
+
+ def patch(self):
+ # Force Python libraries to be installed to self.prefix
+ filter_file('pip install', 'pip install --prefix ' + self.prefix,
+ os.path.join('cpp', 'pybind', 'make_install_pip_package.cmake'))
+
+ def cmake_args(self):
+ args = [
+ self.define('BUILD_UNIT_TESTS', self.run_tests),
+ self.define_from_variant('BUILD_PYTHON_MODULE', 'python'),
+ self.define_from_variant('BUILD_CUDA_MODULE', 'cuda'),
+ # Use Spack-installed dependencies instead of vendored dependencies
+ # Numerous issues with using externally installed dependencies:
+ # https://github.com/isl-org/Open3D/issues/4333
+ # https://github.com/isl-org/Open3D/issues/4360
+ # self.define('USE_SYSTEM_EIGEN3', True),
+ # self.define('USE_SYSTEM_FLANN', True),
+ # self.define('USE_SYSTEM_FMT', True),
+ # self.define('USE_SYSTEM_GLEW', True),
+ # self.define('USE_SYSTEM_GLFW', True),
+ # self.define('USE_SYSTEM_IMGUI', True),
+ # self.define('USE_SYSTEM_JPEG', True),
+ # self.define('USE_SYSTEM_LIBLZF', True),
+ # self.define('USE_SYSTEM_PNG', True),
+ # self.define('USE_SYSTEM_PYBIND11', True),
+ # self.define('USE_SYSTEM_QHULL', True),
+ # self.define('USE_SYSTEM_TINYGLTF', True),
+ # self.define('USE_SYSTEM_TINYOBJLOADER', True),
+ ]
+
+ if '+python' in self.spec:
+ args.append(
+ self.define('PYTHON_EXECUTABLE', self.spec['python'].command.path))
+
+ return args
+
+ def check(self):
+ with working_dir(self.build_directory):
+ tests = Executable(os.path.join('bin', 'tests'))
+ tests()
+
+ def install(self, spec, prefix):
+ with working_dir(self.build_directory):
+ make('install')
+ if '+python' in spec:
+ make('install-pip-package')
+
+ # Tests don't pass unless all optional features are compiled, including PyTorch
+ # @run_after('install')
+ # @on_package_attributes(run_tests=True)
+ # def unit_test(self):
+ # if '+python' in self.spec:
+ # pytest = which('pytest')
+ # pytest(os.path.join('python', 'test'))
+
+ @run_after('install')
+ @on_package_attributes(run_tests=True)
+ def test(self):
+ if '+python' in self.spec:
+ self.run_test(self.spec['python'].command.path,
+ ['-c', 'import open3d'],
+ purpose='checking import of open3d',
+ work_dir='spack-test')
diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py
index 0c267eb931..5beb8ce6b1 100644
--- a/var/spack/repos/builtin/packages/openblas/package.py
+++ b/var/spack/repos/builtin/packages/openblas/package.py
@@ -49,6 +49,7 @@ class Openblas(MakefilePackage):
variant('shared', default=True, description='Build shared libraries')
variant('consistent_fpcsr', default=False, description='Synchronize FP CSR between threads (x86/x86_64 only)')
variant('bignuma', default=False, description='Enable experimental support for up to 1024 CPUs/Cores and 128 numa nodes')
+ variant('symbol_suffix', default='none', description='Set a symbol suffix')
variant('locking', default=True, description='Build with thread safety')
variant(
@@ -302,6 +303,10 @@ class Openblas(MakefilePackage):
if '+ilp64' in self.spec:
make_defs += ['INTERFACE64=1']
+ suffix = self.spec.variants['symbol_suffix'].value
+ if suffix != 'none':
+ make_defs += ['SYMBOLSUFFIX={0}'.format(suffix)]
+
# Synchronize floating-point control and status register (FPCSR)
# between threads (x86/x86_64 only).
if '+consistent_fpcsr' in self.spec:
@@ -339,6 +344,19 @@ class Openblas(MakefilePackage):
return find_headers(['cblas', 'lapacke'], self.prefix.include)
@property
+ def libs(self):
+ spec = self.spec
+
+ # Look for openblas{symbol_suffix}
+ name = 'libopenblas'
+ search_shared = bool(spec.variants['shared'].value)
+ suffix = spec.variants['symbol_suffix'].value
+ if suffix != 'none':
+ name += suffix
+
+ return find_libraries(name, spec.prefix, shared=search_shared, recursive=True)
+
+ @property
def build_targets(self):
targets = ['libs', 'netlib']
diff --git a/var/spack/repos/builtin/packages/opencarp/package.py b/var/spack/repos/builtin/packages/opencarp/package.py
index 0777ed24aa..5d277e945a 100644
--- a/var/spack/repos/builtin/packages/opencarp/package.py
+++ b/var/spack/repos/builtin/packages/opencarp/package.py
@@ -18,7 +18,8 @@ class Opencarp(CMakePackage):
maintainers = ['MarieHouillon']
- version('8.1', commit='28eb2e97', submodules=False, no_cache=True, preferred=True)
+ version('8.2', commit='dbfd16fd', submodules=False, no_cache=True, preferred=True)
+ version('8.1', commit='28eb2e97', submodules=False, no_cache=True)
version('7.0', commit='78da9195', submodules=False, no_cache=True)
version('master', branch='master', submodules=False, no_cache=True)
@@ -40,7 +41,7 @@ class Opencarp(CMakePackage):
depends_on('py-carputils')
depends_on('meshtool')
# Use specific versions of carputils and meshtool for releases
- for ver in ['7.0', '8.1']:
+ for ver in ['8.2', '7.0', '8.1']:
depends_on('py-carputils@oc' + ver, when='@' + ver + ' +carputils')
depends_on('meshtool@oc' + ver, when='@' + ver + ' +meshtool')
diff --git a/var/spack/repos/builtin/packages/opencascade/package.py b/var/spack/repos/builtin/packages/opencascade/package.py
index 3ac5888975..22f5a4b53a 100644
--- a/var/spack/repos/builtin/packages/opencascade/package.py
+++ b/var/spack/repos/builtin/packages/opencascade/package.py
@@ -18,6 +18,8 @@ class Opencascade(CMakePackage):
maintainers = ['wdconinc']
+ version('7.6.0', extension='tar.gz',
+ sha256='e7f989d52348c3b3acb7eb4ee001bb5c2eed5250cdcceaa6ae97edc294f2cabd')
version('7.5.3', extension='tar.gz',
sha256='cc3d3fd9f76526502c3d9025b651f45b034187430f231414c97dda756572410b')
version('7.5.2', extension='tar.gz',
diff --git a/var/spack/repos/builtin/packages/opencv/opencv3.2_cmake.patch b/var/spack/repos/builtin/packages/opencv/opencv3.2_cmake.patch
index c479fb450e..c479fb450e 100755..100644
--- a/var/spack/repos/builtin/packages/opencv/opencv3.2_cmake.patch
+++ b/var/spack/repos/builtin/packages/opencv/opencv3.2_cmake.patch
diff --git a/var/spack/repos/builtin/packages/opencv/opencv3.2_ffmpeg.patch b/var/spack/repos/builtin/packages/opencv/opencv3.2_ffmpeg.patch
index d7786308d8..d7786308d8 100755..100644
--- a/var/spack/repos/builtin/packages/opencv/opencv3.2_ffmpeg.patch
+++ b/var/spack/repos/builtin/packages/opencv/opencv3.2_ffmpeg.patch
diff --git a/var/spack/repos/builtin/packages/opencv/opencv3.2_python3.7.patch b/var/spack/repos/builtin/packages/opencv/opencv3.2_python3.7.patch
index d96011330d..d96011330d 100755..100644
--- a/var/spack/repos/builtin/packages/opencv/opencv3.2_python3.7.patch
+++ b/var/spack/repos/builtin/packages/opencv/opencv3.2_python3.7.patch
diff --git a/var/spack/repos/builtin/packages/opencv/opencv3.2_regacyvtk.patch b/var/spack/repos/builtin/packages/opencv/opencv3.2_regacyvtk.patch
index f5341c62fa..f5341c62fa 100755..100644
--- a/var/spack/repos/builtin/packages/opencv/opencv3.2_regacyvtk.patch
+++ b/var/spack/repos/builtin/packages/opencv/opencv3.2_regacyvtk.patch
diff --git a/var/spack/repos/builtin/packages/opencv/opencv3.2_vtk.patch b/var/spack/repos/builtin/packages/opencv/opencv3.2_vtk.patch
index e5eecd7092..e5eecd7092 100755..100644
--- a/var/spack/repos/builtin/packages/opencv/opencv3.2_vtk.patch
+++ b/var/spack/repos/builtin/packages/opencv/opencv3.2_vtk.patch
diff --git a/var/spack/repos/builtin/packages/openfoam/package.py b/var/spack/repos/builtin/packages/openfoam/package.py
index 146b8a838b..3c054b73d1 100644
--- a/var/spack/repos/builtin/packages/openfoam/package.py
+++ b/var/spack/repos/builtin/packages/openfoam/package.py
@@ -265,6 +265,8 @@ class Openfoam(Package):
version('develop', branch='develop', submodules='True')
version('master', branch='master', submodules='True')
+ version('2112', sha256='3e838731e79db1c288acc27aad8cc8a43d9dac1f24e5773e3b9fa91419a8c3f7')
+ version('2106_211215', sha256='08c0d0b90b43505693ff8838e827f09e14ec9fb475956ef53cc2206c736277b1')
version('2106', sha256='11e41e5b9a253ef592a8f6b79f6aded623b28308192d02cec1327078523b5a37')
version('2012_210414', sha256='5260aaa79f91aad58a3a305c1a12d0d48b10f12e37cd99a6fa561969b15ea09d')
version('2012', sha256='3d6e39e39e7ae61d321fbc6db6c3748e6e5e1c4886454207a7f1a7321469e65a')
@@ -559,6 +561,18 @@ class Openfoam(Package):
filter_file(r'trapFpe\s+\d+\s*;', 'trapFpe 0;',
controlDict, backup=False)
+ @when('@:2106 %aocc@3.2.0:')
+ @run_before('configure')
+ def make_amd_rules(self):
+ """Due to the change in the linker behavior in AOCC v3.2, it is now
+ issuing diagnostic messages for the unreferenced symbols in the
+ shared objects as it may lead to run time failures.
+ """
+ general_rules = 'wmake/rules/General'
+ src = join_path(general_rules, 'Clang')
+ filter_file('clang++', spack_cxx + ' -pthread', join_path(src, 'c++'),
+ backup=False, string=True)
+
@when('@1812: %fj')
@run_before('configure')
def make_fujitsu_rules(self):
@@ -932,8 +946,7 @@ class OpenfoamArch(object):
elif target == 'ppc64le':
platform += 'PPC64le'
elif platform == 'darwin':
- if target == 'x86_64':
- platform += '64'
+ platform += '64' # aarch64 or x86_64
# ... and others?
self.arch = platform
diff --git a/var/spack/repos/builtin/packages/openjpeg/package.py b/var/spack/repos/builtin/packages/openjpeg/package.py
index a6b628f3f1..1a6b81359e 100644
--- a/var/spack/repos/builtin/packages/openjpeg/package.py
+++ b/var/spack/repos/builtin/packages/openjpeg/package.py
@@ -31,7 +31,12 @@ class Openjpeg(CMakePackage):
version('1.5.2', sha256='3734e95edd0bef6e056815591755efd822228dc3cd866894e00a2c929026b16d')
version('1.5.1', sha256='6a42fcc23cb179f69a1e94429089e5a5926aee1ffe582a0a6bd91299d297e61a')
- depends_on('zlib')
+ variant('codec', default=False, description='Build the CODEC executables')
+
+ depends_on('zlib', when='+codec')
+ depends_on('libpng', when='+codec')
+ depends_on('libtiff', when='+codec')
+ depends_on('lcms', when='+codec')
# The problem with install name of the library on MacOs was fixed starting
# version 2.1.1: https://github.com/uclouvain/openjpeg/commit/b9a247b559e62e55f5561624cf4a19aee3c8afdc
@@ -56,3 +61,18 @@ class Openjpeg(CMakePackage):
def libs(self):
return find_libraries('libopenjp{0}'.format(self.version.up_to(1)),
root=self.prefix, recursive=True)
+
+ def cmake_args(self):
+ args = [
+ self.define_from_variant('BUILD_CODEC', 'codec'),
+ # MJ2 executables are disabled by default and we just make it
+ # explicit. Note that the executables require additional libraries
+ # as in the case '+codec', therefore, we will need to update the
+ # 'depends_on' directives when/if we introduce a variant that
+ # enables them.
+ self.define('BUILD_MJ2', False),
+ # Note that if the list of dependencies is incomplete, there is
+ # still a chance that the bundled third-party libraries get built.
+ self.define('BUILD_THIRDPARTY', False)
+ ]
+ return args
diff --git a/var/spack/repos/builtin/packages/openldap/package.py b/var/spack/repos/builtin/packages/openldap/package.py
index 7591a3d4b6..91c9d70ad8 100644
--- a/var/spack/repos/builtin/packages/openldap/package.py
+++ b/var/spack/repos/builtin/packages/openldap/package.py
@@ -17,8 +17,9 @@ class Openldap(AutotoolsPackage):
"""
homepage = "https://www.openldap.org/"
- url = "ftp://ftp.openldap.org/pub/OpenLDAP/openldap-release/openldap-2.4.48.tgz"
+ url = "https://www.openldap.org/software/download/OpenLDAP/openldap-release/openldap-2.6.0.tgz"
+ version('2.6.0', sha256='b71c580eac573e9aba15d95f33dd4dd08f2ed4f0d7fc09e08ad4be7ed1e41a4f')
version('2.4.49', sha256='e3b117944b4180f23befe87d0dcf47f29de775befbc469dcf4ac3dab3311e56e')
version('2.4.48', sha256='d9523ffcab5cd14b709fcf3cb4d04e8bc76bb8970113255f372bc74954c6074d')
@@ -30,61 +31,73 @@ class Openldap(AutotoolsPackage):
values=('gnutls', 'openssl'), multi=False)
variant('perl', default=False, description='Perl backend to Slapd')
+ variant('sasl', default=True, description='Build with Cyrus SASL support')
+ variant('static', default=False, description='Build static libraries')
+ variant('shared', default=True, description='Build shared libraries')
+ variant('dynamic', default=True, description='Enable linking built binaries with dynamic libs')
+ variant('wt', default=False, description='Enable WiredTiger backend', when='@2.5.0:')
+ conflicts('~static', when='~shared')
depends_on('icu4c', when='+icu')
depends_on('gnutls', when='~client_only tls=gnutls')
depends_on('openssl', when='~client_only tls=openssl')
+ depends_on('openssl@1.1.1:', when='~client_only tls=openssl @2.6.0:')
depends_on('unixodbc', when='~client_only')
depends_on('postgresql', when='~client_only')
depends_on('berkeley-db', when='~client_only') # for slapd
# Recommended dependencies by Linux From Scratch
- # depends_on('cyrus-sasl', when='~client_only') # not avail. in spack yet
+ depends_on('cyrus-sasl', when='+sasl')
# depends_on('openslp', when='~client_only') # not avail. in spack yet
# depends_on('Pth', when='~client_only') # not avail. in spack yet
depends_on('perl', when='~client_only+perl') # for slapd
+ depends_on('groff', type='build')
+ depends_on('pkgconfig', type='build')
+ depends_on('wiredtiger', when='@2.6.0:')
# Ref: https://www.linuxfromscratch.org/blfs/view/svn/server/openldap.html
@when('+client_only')
def configure_args(self):
- return ['CPPFLAGS=-D_GNU_SOURCE',
- '--enable-static',
- '--enable-dynamic',
+ args = ['CPPFLAGS=-D_GNU_SOURCE',
'--disable-debug',
'--disable-slapd',
]
+ args += self.with_or_without('cyrus-sasl', variant='sasl')
+ args += self.enable_or_disable('static')
+ args += self.enable_or_disable('shared')
+ args += self.enable_or_disable('dynamic')
+ return args
@when('~client_only')
def configure_args(self):
# Ref: https://www.openldap.org/lists/openldap-technical/201009/msg00304.html
args = ['CPPFLAGS=-D_GNU_SOURCE', # fixes a build error, see Ref above
- '--enable-static',
'--disable-debug',
- '--with-cyrus-sasl',
- '--enable-dynamic',
'--enable-crypt',
'--enable-spasswd',
'--enable-slapd',
'--enable-modules',
'--enable-rlookups',
'--enable-backends=mod',
- '--disable-ndb',
'--disable-sql',
- '--disable-shell',
- '--disable-bdb',
- '--disable-hdb',
'--enable-overlays=mod',
]
- if '~client_only' in self.spec:
- if 'tls=gnutls' in self.spec:
- args.append('--with-tls=gnutls')
- if 'tls=openssl' in self.spec:
- args.append('--with-tls=openssl')
+ if self.spec.satisfies('@:2.5'):
+ args.extend(('--disable-ndb', '--disable-shell', '--disable-bdb',
+ '--disable-hdb'))
+
+ args += self.enable_or_disable('static')
+ args += self.enable_or_disable('shared')
+ args += self.enable_or_disable('dynamic')
+ args += self.with_or_without('cyrus-sasl', variant='sasl')
+ args.append('--with-tls=' + self.spec.variants['tls'].value)
+ if self.spec.satisfies('@2.6.0: tls=gnutls'):
+ args += ['--disable-autoca']
+
+ if self.spec.satisfies('@2.5.0:'):
+ args += self.enable_or_disable('wt')
- if '+perl' in self.spec:
- args.append('--enable-perl')
- else:
- args.append('--disable-perl')
+ args += self.enable_or_disable('perl')
return args
diff --git a/var/spack/repos/builtin/packages/openlibm/package.py b/var/spack/repos/builtin/packages/openlibm/package.py
new file mode 100644
index 0000000000..0d72a46fc2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openlibm/package.py
@@ -0,0 +1,33 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Openlibm(MakefilePackage):
+ """OpenLibm is an effort to have a high quality, portable, standalone C
+ mathematical library"""
+
+ homepage = "https://github.com/JuliaMath/openlibm"
+ url = "https://github.com/JuliaMath/openlibm/archive/refs/tags/v0.8.0.tar.gz"
+
+ maintainers = ['haampie']
+
+ version('0.8.0', sha256='03620768df4ca526a63dd675c6de95a5c9d167ff59555ce57a61c6bf49e400ee')
+ version('0.7.5', sha256='be983b9e1e40e696e8bbb7eb8f6376d3ca0ae675ae6d82936540385b0eeec15b')
+
+ def make(self, spec, prefix):
+ args = [
+ 'prefix={0}'.format(prefix),
+ 'USE_GCC={0}'.format('1' if self.compiler.name == 'gcc' else '0'),
+ 'USE_CLANG={0}'.format('1' if self.compiler.name == 'clang' else '0')
+ ]
+ make(*args)
+
+ def install(self, spec, prefix):
+ args = [
+ 'prefix={0}'.format(prefix),
+ ]
+ make('install', *args)
diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py
index 0d48cfc523..3b5c9f6c82 100644
--- a/var/spack/repos/builtin/packages/openmpi/package.py
+++ b/var/spack/repos/builtin/packages/openmpi/package.py
@@ -39,10 +39,12 @@ class Openmpi(AutotoolsPackage):
version('master', branch='master', submodules=True)
# Current
- version('4.1.1', sha256='e24f7a778bd11a71ad0c14587a7f5b00e68a71aa5623e2157bafee3d44c07cda') # libmpi.so.40.30.1
+ version('4.1.2', sha256='9b78c7cf7fc32131c5cf43dd2ab9740149d9d87cadb2e2189f02685749a6b527') # libmpi.so.40.30.2
# Still supported
+ version('4.1.1', sha256='e24f7a778bd11a71ad0c14587a7f5b00e68a71aa5623e2157bafee3d44c07cda') # libmpi.so.40.30.1
version('4.1.0', sha256='73866fb77090819b6a8c85cb8539638d37d6877455825b74e289d647a39fd5b5') # libmpi.so.40.30.0
+ version('4.0.7', sha256='7d3ecc8389161eb721982c855f89c25dca289001577a01a439ae97ce872be997') # libmpi.so.40.20.7
version('4.0.6', sha256='94b7b59ae9860f3bd7b5f378a698713e7b957070fdff2c43453b6cbf8edb410c') # libmpi.so.40.20.6
version('4.0.5', sha256='c58f3863b61d944231077f344fe6b4b8fbb83f3d1bc93ab74640bf3e5acac009') # libmpi.so.40.20.5
version('4.0.4', sha256='47e24eb2223fe5d24438658958a313b6b7a55bb281563542e1afc9dec4a31ac4') # libmpi.so.40.20.4
@@ -241,6 +243,7 @@ class Openmpi(AutotoolsPackage):
description="Build support for the Singularity container")
variant('lustre', default=False,
description="Lustre filesystem library support")
+ variant('romio', default=True, description='Enable ROMIO support')
# Adding support to build a debug version of OpenMPI that activates
# Memchecker, as described here:
#
@@ -652,7 +655,8 @@ class Openmpi(AutotoolsPackage):
spec['slurm'].prefix))
else:
config_args.extend(self.with_or_without('pmi'))
- config_args += self.with_or_without('pmix', activation_value='prefix')
+ if spec.satisfies('+pmix'):
+ config_args.append('--with-pmix={0}'.format(spec['pmix'].prefix))
if spec.satisfies('@3.1.3:') or spec.satisfies('@3.0.3'):
if '+static' in spec:
config_args.append('--enable-static')
@@ -727,6 +731,9 @@ class Openmpi(AutotoolsPackage):
'--disable-mpi-java'
])
+ if '~romio' in spec:
+ config_args.append('--disable-io-romio')
+
# SQLite3 support
if spec.satisfies('@1.7.3:1'):
if '+sqlite3' in spec:
diff --git a/var/spack/repos/builtin/packages/openspeedshop-utils/package.py b/var/spack/repos/builtin/packages/openspeedshop-utils/package.py
index 9d25e04201..5f6dc6a859 100644
--- a/var/spack/repos/builtin/packages/openspeedshop-utils/package.py
+++ b/var/spack/repos/builtin/packages/openspeedshop-utils/package.py
@@ -39,8 +39,6 @@ class OpenspeedshopUtils(CMakePackage):
variant('runtime', default=False,
description="build only the runtime libraries and collectors.")
- variant('cti', default=False,
- description="Build MRNet with the CTI startup option")
variant('crayfe', default=False,
description="build only the FE tool using the runtime_dir \
to point to target build.")
@@ -107,9 +105,6 @@ class OpenspeedshopUtils(CMakePackage):
depends_on('cbtf-krell@develop+crayfe', when='@develop+crayfe', type=('build', 'link', 'run'))
depends_on('cbtf-krell@1.9.3:9999+crayfe', when='@2.4.0:9999+crayfe', type=('build', 'link', 'run'))
- depends_on('cbtf-krell@develop+cti', when='@develop+cti', type=('build', 'link', 'run'))
- depends_on('cbtf-krell@1.9.3:9999+cti', when='@2.4.0:9999+cti', type=('build', 'link', 'run'))
-
depends_on('cbtf-krell@develop+mpich', when='@develop+mpich', type=('build', 'link', 'run'))
depends_on('cbtf-krell@1.9.3:9999+mpich', when='@2.4.0:9999+mpich', type=('build', 'link', 'run'))
@@ -132,10 +127,7 @@ class OpenspeedshopUtils(CMakePackage):
depends_on("cbtf-argonavis@1.9.3:9999", when='@2.4.0:9999+cuda', type=('build', 'link', 'run'))
# For MRNet
- depends_on("mrnet@5.0.1-3:+cti", when='@develop+cti', type=('build', 'link', 'run'))
depends_on("mrnet@5.0.1-3:+lwthreads", when='@develop', type=('build', 'link', 'run'))
-
- depends_on("mrnet@5.0.1-3:+cti", when='@2.4.0:9999+cti', type=('build', 'link', 'run'))
depends_on("mrnet@5.0.1-3:+lwthreads", when='@2.4.0:9999', type=('build', 'link', 'run'))
patch('arm.patch', when='target=aarch64:')
diff --git a/var/spack/repos/builtin/packages/openspeedshop/package.py b/var/spack/repos/builtin/packages/openspeedshop/package.py
index 22f94196e6..a123bdfcd6 100644
--- a/var/spack/repos/builtin/packages/openspeedshop/package.py
+++ b/var/spack/repos/builtin/packages/openspeedshop/package.py
@@ -32,8 +32,6 @@ class Openspeedshop(CMakePackage):
variant('runtime', default=False,
description="build only the runtime libraries and collectors.")
- variant('cti', default=False,
- description="Build MRNet with the CTI startup option")
variant('crayfe', default=False,
description="build only the FE tool using the runtime_dir \
to point to target build.")
@@ -105,9 +103,6 @@ class Openspeedshop(CMakePackage):
depends_on('cbtf-krell@develop+crayfe', when='@develop+crayfe', type=('build', 'link', 'run'))
depends_on('cbtf-krell@1.9.3:9999+crayfe', when='@2.4.0:9999+crayfe', type=('build', 'link', 'run'))
- depends_on('cbtf-krell@develop+cti', when='@develop+cti', type=('build', 'link', 'run'))
- depends_on('cbtf-krell@1.9.3:9999+cti', when='@2.4.0:9999+cti', type=('build', 'link', 'run'))
-
depends_on('cbtf-krell@develop+mpich', when='@develop+mpich', type=('build', 'link', 'run'))
depends_on('cbtf-krell@1.9.3:9999+mpich', when='@2.4.0:9999+mpich', type=('build', 'link', 'run'))
@@ -130,10 +125,7 @@ class Openspeedshop(CMakePackage):
depends_on("cbtf-argonavis@1.9.3:9999", when='@2.4.0:9999+cuda', type=('build', 'link', 'run'))
# For MRNet
- depends_on("mrnet@5.0.1-3:+cti", when='@develop+cti', type=('build', 'link', 'run'))
depends_on("mrnet@5.0.1-3:+lwthreads", when='@develop', type=('build', 'link', 'run'))
-
- depends_on("mrnet@5.0.1-3:+cti", when='@2.4.0:9999+cti', type=('build', 'link', 'run'))
depends_on("mrnet@5.0.1-3:+lwthreads", when='@2.4.0:9999', type=('build', 'link', 'run'))
patch('arm.patch', when='target=aarch64:')
diff --git a/var/spack/repos/builtin/packages/openturns/package.py b/var/spack/repos/builtin/packages/openturns/package.py
new file mode 100644
index 0000000000..7ab5405052
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openturns/package.py
@@ -0,0 +1,59 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Openturns(CMakePackage):
+ """OpenTURNS is a scientific C++ and Python library featuring an
+ internal data model and algorithms dedicated to the treatment of
+ uncertainties. The main goal of this library is to provide all
+ functionalities needed to treat uncertainties in studies with
+ industrial applications. Targeted users are all engineers who want
+ to introduce the probabilistic dimension in their so far
+ deterministic studies."""
+
+ homepage = "https://openturns.github.io/www/"
+ git = "https://github.com/openturns/openturns.git"
+ maintainers = ['liuyangzhuan']
+
+ version('master', branch='master')
+
+ variant('python', default=True, description='Build Python bindings')
+
+ extends('python', when='+python')
+
+ depends_on('mpi', type=('build', 'run'))
+ depends_on('lapack', type=('build', 'run'))
+ depends_on('cmake@2.8:', type='build')
+ depends_on('swig', type=('build', 'run'))
+ depends_on('py-numpy@1.7:', type=('build', 'run'))
+ depends_on('py-pandas', type=('build', 'run'))
+ depends_on('py-matplotlib', type=('build', 'run'))
+ depends_on('boost+system+serialization+thread', type=('build', 'run'))
+ depends_on('intel-tbb', type=('build', 'run'))
+ depends_on('py-cloudpickle', type=('build', 'run'))
+ depends_on('py-urllib3', type=('build', 'run'))
+
+ def cmake_args(self):
+ spec = self.spec
+
+ args = [
+ '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc,
+ '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx,
+ '-DCMAKE_INSTALL_LIBDIR:STRING=%s' % self.prefix.lib,
+ '-DCMAKE_INSTALL_BINDIR:STRING=%s' % self.prefix.bin,
+ '-DLAPACK_LIBRARIES=%s' % spec['lapack'].libs.joined(";"),
+ ]
+
+ if '+python' in spec:
+ args.extend([
+ # By default picks up the system python not the Spack build
+ '-DPYTHON_EXECUTABLE={0}'.format(spec['python'].command.path),
+ # By default installs to the python prefix
+ '-DPYTHON_SITE_PACKAGES={0}'.format(site_packages_dir),
+ ])
+
+ return args
diff --git a/var/spack/repos/builtin/packages/oras/package.py b/var/spack/repos/builtin/packages/oras/package.py
new file mode 100644
index 0000000000..dea8a8693b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/oras/package.py
@@ -0,0 +1,47 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import find
+
+from spack import *
+
+
+class Oras(Package):
+ """ORAS means OCI Registry As Storage"""
+
+ homepage = "https://oras.land"
+ git = "https://github.com/oras-project/oras"
+ url = "https://github.com/oras-project/oras/archive/refs/tags/v0.12.0.tar.gz"
+
+ maintainers = ['vsoch']
+
+ version('main', branch="main")
+ version("0.12.0", sha256="5e19d61683a57b414efd75bd1b0290c941b8faace5fcc9d488f5e4aa674bf03e")
+
+ depends_on("go", type='build')
+
+ def setup_build_environment(self, env):
+ # Point GOPATH at the top of the staging dir for the build step.
+ env.prepend_path('GOPATH', self.stage.path)
+
+ def install(self, spec, prefix):
+ if self.spec.satisfies('platform=linux target=aarch64:'):
+ make("build-linux-arm64")
+ elif self.spec.satisfies('platform=linux'):
+ make("build-linux")
+ elif self.spec.satisfies('platform=darwin target=aarch64:'):
+ make("build-mac-arm64")
+ elif self.spec.satisfies('platform=darwin'):
+ make("build-mac")
+ elif self.spec.satisfies('platform=windows'):
+ make("build-windows")
+ mkdirp(prefix.bin)
+
+ oras = find("bin", "oras")
+ if not oras:
+ tty.die("Oras executable missing in bin.")
+ tty.debug("Found oras executable %s to move into install bin" % oras[0])
+ install(oras[0], prefix.bin)
diff --git a/var/spack/repos/builtin/packages/otf/package.py b/var/spack/repos/builtin/packages/otf/package.py
index 2fe4dde0de..743176242b 100644
--- a/var/spack/repos/builtin/packages/otf/package.py
+++ b/var/spack/repos/builtin/packages/otf/package.py
@@ -14,6 +14,8 @@ class Otf(AutotoolsPackage):
homepage = "http://tu-dresden.de/die_tu_dresden/zentrale_einrichtungen/zih/forschung/projekte/otf/index_html/document_view?set_language=en"
url = "https://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz"
+ maintainers = ['michaelkuhn']
+
version('1.12.5salmon', sha256='0a8427360dedb38e8ddca30f14d95f826420c550337c5a79dbb754904e194088')
depends_on('zlib')
@@ -23,7 +25,9 @@ class Otf(AutotoolsPackage):
args.append('--without-mpi')
args.append('--without-vtf3')
- args.append('--with-zlib')
- args.append('--with-zlibsymbols')
args.append('--without-zoidfs')
+
+ args.append('--with-zlib')
+ args.append('--with-zlib-dir={0}'.format(self.spec['zlib'].prefix))
+
return args
diff --git a/var/spack/repos/builtin/packages/pagmo2/package.py b/var/spack/repos/builtin/packages/pagmo2/package.py
new file mode 100644
index 0000000000..29f71963da
--- /dev/null
+++ b/var/spack/repos/builtin/packages/pagmo2/package.py
@@ -0,0 +1,39 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Pagmo2(CMakePackage):
+ """Parallel Global Multiobjective Optimizer (and its Python alter ego
+ PyGMO) is a C++ / Python platform to perform parallel computations of
+ optimisation tasks (global and local) via the asynchronous generalized
+ island model."""
+
+ homepage = "https://esa.github.io/pagmo2/"
+ url = "https://github.com/esa/pagmo2/archive/v2.18.0.tar.gz"
+ git = "https://github.com/esa/pagmo2.git"
+ maintainers = ['liuyangzhuan']
+
+ version('master', branch='master')
+ version('2.18.0', sha256='5ad40bf3aa91857a808d6b632d9e1020341a33f1a4115d7a2b78b78fd063ae31')
+
+ depends_on('boost+system+serialization+thread')
+ depends_on('intel-tbb')
+ depends_on('mpi')
+ depends_on('cmake@3.1:', type='build')
+
+ variant('shared', default=True, description='Build shared libraries')
+
+ def cmake_args(self):
+ spec = self.spec
+
+ args = [
+ '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc,
+ '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx,
+ self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
+ ]
+
+ return args
diff --git a/var/spack/repos/builtin/packages/palisade-development/package.py b/var/spack/repos/builtin/packages/palisade-development/package.py
new file mode 100644
index 0000000000..f32d521741
--- /dev/null
+++ b/var/spack/repos/builtin/packages/palisade-development/package.py
@@ -0,0 +1,70 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PalisadeDevelopment(CMakePackage):
+ """
+ PALISADE is a general lattice cryptography library that currently
+ includes efficient implementations of the following lattice cryptography
+ capabilities:
+
+ Fully Homomorphic Encryption (FHE):
+ - Brakerski/Fan-Vercauteren (BFV) scheme for integer arithmetic
+ - Brakerski-Gentry-Vaikuntanathan (BGV) scheme for integer arithmetic
+ - Cheon-Kim-Kim-Song (CKKS) scheme for real-number arithmetic
+ - Ducas-Micciancio (FHEW) and Chillotti-Gama-Georgieva-Izabachene
+ (TFHE) schemes for Boolean circuit evaluation
+
+ Multi-Party Extensions of FHE (to support multi-key FHE):
+ - Threshold FHE for BGV, BFV, and CKKS schemes
+ - Proxy Re-Encryption for BGV, BFV, and CKKS schemes
+ """
+
+ homepage = "https://gitlab.com/palisade/palisade-development"
+ git = "https://gitlab.com/palisade/palisade-development.git"
+ maintainers = ['wohlbier']
+
+ version('feature-fixed-point-encoding',
+ branch='feature-fixed-point-encoding')
+ version('master', branch='master', preferred=True)
+
+ variant('shared', default=True, description='Build shared library.')
+ variant('static', default=True, description='Build static library.')
+ variant('with_be2', default=True, description='Build with backend 2.')
+ variant('with_be4', default=True, description='Build with backend 4.')
+ variant('with_intel_hexl', default=False, description='Use Intel HEXL.')
+ variant('with_ntl', default=False, description='Build NTL.')
+
+ depends_on('autoconf')
+ depends_on('hwloc', when='%clang')
+ depends_on('ntl', when='+with_ntl')
+ depends_on('ntl+shared', when='+with_ntl +shared')
+
+ def cmake_args(self):
+ args = [
+ self.define_from_variant('BUILD_SHARED', 'shared'),
+ self.define_from_variant('BUILD_STATIC', 'static'),
+ self.define_from_variant('WITH_BE2', 'with_be2'),
+ self.define_from_variant('WITH_BE4', 'with_be4'),
+ self.define_from_variant('WITH_INTEL_HEXL', 'with_intel_hexl'),
+ self.define_from_variant('WITH_NTL', 'with_ntl')
+ ]
+ if self.spec.satisfies('%clang'):
+ OpenMP_C_FLAGS = "-fopenmp=libomp"
+ OpenMP_C_LIB_NAMES = "libomp"
+ args += [
+ self.define('OpenMP_C', 'clang'),
+ self.define('OpenMP_C_FLAGS', OpenMP_C_FLAGS),
+ self.define('OpenMP_C_LIB_NAMES', OpenMP_C_LIB_NAMES),
+ self.define('OpenMP_CXX', 'clang++'),
+ self.define('OpenMP_CXX_FLAGS', OpenMP_C_FLAGS),
+ self.define('OpenMP_CXX_LIB_NAMES', OpenMP_C_LIB_NAMES),
+ self.define('OpenMP_libomp_LIBRARY', 'libomp'),
+ self.define('OpenMP_libgomp_LIBRARY', 'libgomp'),
+ self.define('OpenMP_libiomp5_LIBRARY', 'libiomp5')
+ ]
+ return args
diff --git a/var/spack/repos/builtin/packages/parallel-netcdf/package.py b/var/spack/repos/builtin/packages/parallel-netcdf/package.py
index a9ec5fd8d7..64082462a3 100644
--- a/var/spack/repos/builtin/packages/parallel-netcdf/package.py
+++ b/var/spack/repos/builtin/packages/parallel-netcdf/package.py
@@ -177,7 +177,7 @@ class ParallelNetcdf(AutotoolsPackage):
return args
- examples_src_dir = 'examples/CXX'
+ examples_src_dir = join_path('examples', 'CXX')
@run_after('install')
def cache_test_sources(self):
@@ -186,19 +186,24 @@ class ParallelNetcdf(AutotoolsPackage):
self.cache_extra_test_sources([self.examples_src_dir])
def test(self):
- test_dir = join_path(self.install_test_root, self.examples_src_dir)
+ test_dir = join_path(self.test_suite.current_test_cache_dir,
+ self.examples_src_dir)
# pnetcdf has many examples to serve as a suitable smoke check.
# column_wise was chosen based on the E4S test suite. Other
# examples should work as well.
test_exe = 'column_wise'
- options = ['{0}.cpp'.format(test_exe), '-o', test_exe, '-lpnetcdf']
+ options = ['{0}.cpp'.format(test_exe), '-o', test_exe, '-lpnetcdf',
+ '-L{0}'.format(self.prefix.lib),
+ '-I{0}'.format(self.prefix.include)]
reason = 'test: compiling and linking pnetcdf example'
self.run_test(self.spec['mpi'].mpicxx, options, [],
installed=False, purpose=reason, work_dir=test_dir)
- mpiexe_list = ['mpirun', 'mpiexec', 'srun']
+ mpiexe_list = [self.spec['mpi'].prefix.bin.srun,
+ self.spec['mpi'].prefix.bin.mpirun,
+ self.spec['mpi'].prefix.bin.mpiexec]
for mpiexe in mpiexe_list:
if os.path.isfile(mpiexe):
- self.run_test(mpiexe, ['-n', '4', test_exe], [],
+ self.run_test(mpiexe, ['-n', '1', test_exe], [],
installed=False,
purpose='test: pnetcdf smoke test',
skip_missing=True,
diff --git a/var/spack/repos/builtin/packages/parallelio/package.py b/var/spack/repos/builtin/packages/parallelio/package.py
index 601f91089e..6ab05344f8 100644
--- a/var/spack/repos/builtin/packages/parallelio/package.py
+++ b/var/spack/repos/builtin/packages/parallelio/package.py
@@ -20,6 +20,7 @@ class Parallelio(CMakePackage):
version('2_5_2', sha256='935bc120ef3bf4fe09fb8bfdf788d05fb201a125d7346bf6b09e27ac3b5f345c')
variant('pnetcdf', default=False, description='enable pnetcdf')
+ variant('timing', default=False, description='enable GPTL timing')
depends_on('mpi')
depends_on('netcdf-c +mpi', type='link')
@@ -35,6 +36,7 @@ class Parallelio(CMakePackage):
def cmake_args(self):
define = self.define
+ define_from_variant = self.define_from_variant
spec = self.spec
env['CC'] = spec['mpi'].mpicc
env['FC'] = spec['mpi'].mpifc
@@ -50,4 +52,7 @@ class Parallelio(CMakePackage):
define('PnetCDF_C_PATH', spec['parallel-netcdf'].prefix),
define('PnetCDF_Fortran_PATH', spec['parallel-netcdf'].prefix),
])
+ args.extend([
+ define_from_variant('PIO_ENABLE_TIMING', 'timing'),
+ ])
return args
diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py
index 3fd99be7e2..e0ea37a4b2 100644
--- a/var/spack/repos/builtin/packages/paraview/package.py
+++ b/var/spack/repos/builtin/packages/paraview/package.py
@@ -27,7 +27,7 @@ class Paraview(CMakePackage, CudaPackage):
tags = ['e4s']
version('master', branch='master', submodules=True)
- version('5.10.0-RC1', sha256='468d02962abfd5869c46f32fd9dee3095cb00264237edf2659f09a1c0990ec37')
+ version('5.10.0-RC2', sha256='6523577d4f8d0be6182e53c6b59e176c44a051c5f7d743bbda612cc095b18ff6')
version('5.9.1', sha256='0d486cb6fbf55e428845c9650486f87466efcb3155e40489182a7ea85dfd4c8d', preferred=True)
version('5.9.0', sha256='b03258b7cddb77f0ee142e3e77b377e5b1f503bcabc02bfa578298c99a06980d')
version('5.8.1', sha256='7653950392a0d7c0287c26f1d3a25cdbaa11baa7524b0af0e6a1a0d7d487d034')
@@ -173,6 +173,14 @@ class Paraview(CMakePackage, CudaPackage):
# https://gitlab.kitware.com/paraview/paraview/-/merge_requests/4951
depends_on('cli11@1.9.1', when='@5.10:')
+ # ParaView depends on nlohmann-json due to changes in MR
+ # https://gitlab.kitware.com/vtk/vtk/-/merge_requests/8550
+ depends_on('nlohmann-json', when='@master')
+
+ # ParaView depends on proj@8.1.0 due to changes in MR
+ # https://gitlab.kitware.com/vtk/vtk/-/merge_requests/8474
+ depends_on('proj@8.1.0', when='@master')
+
patch('stl-reader-pv440.patch', when='@4.4.0')
# Broken gcc-detection - improved in 5.1.0, redundant later
@@ -207,7 +215,7 @@ class Paraview(CMakePackage, CudaPackage):
def paraview_subdir(self):
"""The paraview subdirectory name as paraview-major.minor"""
if self.spec.version == Version('master'):
- return 'paraview-5.9'
+ return 'paraview-5.10'
else:
return 'paraview-{0}'.format(self.spec.version.up_to(2))
@@ -318,7 +326,7 @@ class Paraview(CMakePackage, CudaPackage):
if spec.satisfies('@5.8:'):
cmake_args.extend([
'-DPARAVIEW_BUILD_EDITION:STRING=%s' %
- spec.variants['build_edition'].value,
+ spec.variants['build_edition'].value.upper(),
'-DPARAVIEW_USE_QT:BOOL=%s' % variant_bool('+qt'),
'-DPARAVIEW_BUILD_WITH_EXTERNAL=ON'])
if spec.satisfies('%cce'):
diff --git a/var/spack/repos/builtin/packages/parsimonator/nox86.patch b/var/spack/repos/builtin/packages/parsimonator/nox86.patch
index afc17c3e35..afc17c3e35 100755..100644
--- a/var/spack/repos/builtin/packages/parsimonator/nox86.patch
+++ b/var/spack/repos/builtin/packages/parsimonator/nox86.patch
diff --git a/var/spack/repos/builtin/packages/patchelf/package.py b/var/spack/repos/builtin/packages/patchelf/package.py
index 0c10743d6b..44e97e6ae5 100644
--- a/var/spack/repos/builtin/packages/patchelf/package.py
+++ b/var/spack/repos/builtin/packages/patchelf/package.py
@@ -16,23 +16,30 @@ class Patchelf(AutotoolsPackage):
list_url = "https://nixos.org/releases/patchelf/"
list_depth = 1
- version('0.13', sha256='4c7ed4bcfc1a114d6286e4a0d3c1a90db147a4c3adda1814ee0eee0f9ee917ed')
- version('0.12', sha256='699a31cf52211cf5ad6e35a8801eb637bc7f3c43117140426400d67b7babd792')
- version('0.11', sha256='e52378cc2f9379c6e84a04ac100a3589145533a7b0cd26ef23c79dfd8a9038f9')
- version('0.10', sha256='b2deabce05c34ce98558c0efb965f209de592197b2c88e930298d740ead09019')
- version('0.9', sha256='f2aa40a6148cb3b0ca807a1bf836b081793e55ec9e5540a5356d800132be7e0a')
- version('0.8', sha256='14af06a2da688d577d64ff8dac065bb8903bbffbe01d30c62df7af9bf4ce72fe')
+ maintainers = ['haampie']
- # Fixes a bug where patchelf errors with 'unsupported overlap
- # of SHT_NOTE and PT_NOTE'
- patch('https://github.com/NixOS/patchelf/pull/230.patch', sha256='a155f233b228f02d7886e304cb13898d93801b52f351e098c2cc0719697ec9d0', when='@0.12')
+ version('0.14.1', sha256='7a1506caf6873a2b60e7bebc35e1671fa232ee075642b074106b0d0636417466')
+ version('0.14', sha256='a31f2bff841dffa896317d3837bc2877c1f79da0744d88e459662d8e7fe7897c')
+ version('0.13.1', sha256='08c0237e89be74d61ddf8f6ff218439cdd62af572d568fb38913b53e222831de')
+ version('0.13', sha256='4c7ed4bcfc1a114d6286e4a0d3c1a90db147a4c3adda1814ee0eee0f9ee917ed')
+ version('0.12', sha256='699a31cf52211cf5ad6e35a8801eb637bc7f3c43117140426400d67b7babd792')
+ version('0.11', sha256='e52378cc2f9379c6e84a04ac100a3589145533a7b0cd26ef23c79dfd8a9038f9')
+ version('0.10', sha256='b2deabce05c34ce98558c0efb965f209de592197b2c88e930298d740ead09019')
+ version('0.9', sha256='f2aa40a6148cb3b0ca807a1bf836b081793e55ec9e5540a5356d800132be7e0a')
+ version('0.8', sha256='14af06a2da688d577d64ff8dac065bb8903bbffbe01d30c62df7af9bf4ce72fe')
conflicts('%gcc@:4.6', when='@0.10:', msg="Requires C++11 support")
+ conflicts('%gcc@:6', when='@0.14:', msg="Requires C++17 support")
+ conflicts('%clang@:3', when='@0.14:', msg="Requires C++17 support")
def url_for_version(self, version):
if version < Version('0.12'):
return "https://nixos.org/releases/patchelf/patchelf-{0}/patchelf-{1}.tar.gz".format(version, version)
+ # Prefer gz over bz2
+ if version >= Version('0.13.1'):
+ return "https://github.com/NixOS/patchelf/releases/download/{0}/patchelf-{1}.tar.gz".format(version, version)
+
return "https://github.com/NixOS/patchelf/releases/download/{0}/patchelf-{1}.tar.bz2".format(version, version)
def test(self):
diff --git a/var/spack/repos/builtin/packages/pcma/package.py b/var/spack/repos/builtin/packages/pcma/package.py
index 095247dcb5..c656683f8e 100644
--- a/var/spack/repos/builtin/packages/pcma/package.py
+++ b/var/spack/repos/builtin/packages/pcma/package.py
@@ -18,6 +18,10 @@ class Pcma(MakefilePackage):
def edit(self, spec, prefix):
makefile = FileFilter('makefile')
makefile.filter('gcc', spack_cc)
+ if spec.satisfies('%gcc@10:'):
+ # they missed one
+ filter_file(r'^sint \*seqlen_array;$', 'extern sint *seqlen_array;',
+ 'calctree.c')
def install(self, spec, prefix):
mkdirp(prefix.bin)
diff --git a/var/spack/repos/builtin/packages/pcre/package.py b/var/spack/repos/builtin/packages/pcre/package.py
index 67cc84084f..9e1d74e7b2 100644
--- a/var/spack/repos/builtin/packages/pcre/package.py
+++ b/var/spack/repos/builtin/packages/pcre/package.py
@@ -12,7 +12,7 @@ class Pcre(AutotoolsPackage):
pattern matching using the same syntax and semantics as Perl 5."""
homepage = "https://www.pcre.org"
- url = "https://ftp.pcre.org/pub/pcre/pcre-8.42.tar.bz2"
+ url = "https://sourceforge.net/projects/pcre/files/pcre/8.42/pcre-8.42.tar.bz2/download"
version('8.44', sha256='19108658b23b3ec5058edc9f66ac545ea19f9537234be1ec62b714c84399366d')
version('8.43', sha256='91e762520003013834ac1adb4a938d53b22a216341c061b0cf05603b290faf6b')
@@ -22,6 +22,7 @@ class Pcre(AutotoolsPackage):
version('8.39', sha256='b858099f82483031ee02092711689e7245586ada49e534a06e678b8ea9549e8b')
version('8.38', sha256='b9e02d36e23024d6c02a2e5b25204b3a4fa6ade43e0a5f869f254f49535079df')
+ maintainers = ['drkennetz']
patch('intel.patch', when='@8.38')
variant('jit', default=False,
diff --git a/var/spack/repos/builtin/packages/percept/package.py b/var/spack/repos/builtin/packages/percept/package.py
index 13c4e1a667..681bcf9518 100644
--- a/var/spack/repos/builtin/packages/percept/package.py
+++ b/var/spack/repos/builtin/packages/percept/package.py
@@ -25,7 +25,7 @@ class Percept(CMakePackage):
depends_on('opennurbs@percept')
depends_on('boost+graph+mpi')
depends_on('yaml-cpp+pic~shared@0.5.3:')
- depends_on('trilinos~shared+exodus+mpi+tpetra+epetra+epetraext+muelu+belos+ifpack2+amesos2+zoltan+stk+boost~superlu-dist+superlu+hdf5+zlib+aztec+sacado~openmp+shards+intrepid@master,12.14.1:')
+ depends_on('trilinos~shared+exodus+mpi+tpetra+epetra+epetraext+muelu+belos+ifpack2+amesos2+zoltan+stk+boost~superlu-dist+superlu+hdf5+aztec+sacado~openmp+shards+intrepid@master,12.14.1:')
def cmake_args(self):
spec = self.spec
diff --git a/var/spack/repos/builtin/packages/percona-server/package.py b/var/spack/repos/builtin/packages/percona-server/package.py
index 22095246ab..57226e5c9f 100644
--- a/var/spack/repos/builtin/packages/percona-server/package.py
+++ b/var/spack/repos/builtin/packages/percona-server/package.py
@@ -21,7 +21,8 @@ class PerconaServer(CMakePackage):
depends_on('openssl')
depends_on('ncurses')
depends_on('readline')
- depends_on('openldap')
+ # Links to libldap_r, which was merged with libldap in OpenLDAP 2.5
+ depends_on('openldap@:2.4')
depends_on('libtirpc')
depends_on('curl')
depends_on('bison', type='build')
diff --git a/var/spack/repos/builtin/packages/perl-dbd-mysql/package.py b/var/spack/repos/builtin/packages/perl-dbd-mysql/package.py
index d968942c09..f6328a0dfd 100644
--- a/var/spack/repos/builtin/packages/perl-dbd-mysql/package.py
+++ b/var/spack/repos/builtin/packages/perl-dbd-mysql/package.py
@@ -10,10 +10,10 @@ class PerlDbdMysql(PerlPackage):
"""MySQL driver for the Perl5 Database Interface (DBI)"""
homepage = "https://metacpan.org/pod/DBD::mysql"
- url = "http://search.cpan.org/CPAN/authors/id/M/MI/MICHIELB/DBD-mysql-4.043.tar.gz"
+ url = "https://search.cpan.org/CPAN/authors/id/M/MI/MICHIELB/DBD-mysql-4.043.tar.gz"
version('4.043', sha256='629f865e8317f52602b2f2efd2b688002903d2e4bbcba5427cb6188b043d6f99')
depends_on('perl-test-deep', type=('build', 'run'))
depends_on('perl-dbi', type=('build', 'run'))
- depends_on('mariadb@:10.1.23')
+ depends_on('mysql-client')
diff --git a/var/spack/repos/builtin/packages/perl-forks/package.py b/var/spack/repos/builtin/packages/perl-forks/package.py
index 7fbd843ac2..16e69a6b6a 100644
--- a/var/spack/repos/builtin/packages/perl-forks/package.py
+++ b/var/spack/repos/builtin/packages/perl-forks/package.py
@@ -19,3 +19,7 @@ class PerlForks(PerlPackage):
depends_on('perl-devel-symdump', type=('build', 'run'))
depends_on('perl-list-moreutils', type=('build', 'run'))
depends_on('perl-sys-sigaction', type=('build', 'run'))
+
+ def setup_build_environment(self, env):
+ if 'perl~threads' in self.spec:
+ env.set('FORKS_SIMULATE_USEITHREADS', '1')
diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py
index ddd732c5dd..72c6c375e3 100644
--- a/var/spack/repos/builtin/packages/perl/package.py
+++ b/var/spack/repos/builtin/packages/perl/package.py
@@ -15,6 +15,7 @@ import os
import re
from contextlib import contextmanager
+from llnl.util import tty
from llnl.util.lang import match_predicate
from spack import *
@@ -299,8 +300,21 @@ class Perl(Package): # Perl doesn't use Autotools, it should subclass Package
# This is to avoid failures when using -mmacosx-version-min=11.1
# since not all Apple Clang compilers support that version range
# See https://eclecticlight.co/2020/07/21/big-sur-is-both-10-16-and-11-0-its-official/
+ # It seems that this is only necessary for older versions of the
+ # command line tools rather than the xcode/clang version.
if spec.satisfies('os=bigsur'):
- env.set('SYSTEM_VERSION_COMPAT', 1)
+ pkgutil = Executable('pkgutil')
+ output = pkgutil('--pkg-info=com.apple.pkg.CLTools_Executables',
+ output=str, error=str, fail_on_error=False)
+ match = re.search(r'version:\s*([0-9.]+)', output)
+ if not match:
+ tty.warn('Failed to detect macOS command line tools version: '
+ + output)
+ else:
+ if Version(match.group(1)) < Version('12'):
+ tty.warn("Setting SYSTEM_VERSION_COMPAT=1 due to older "
+ "command line tools version")
+ env.set('SYSTEM_VERSION_COMPAT', 1)
# This is how we tell perl the locations of bzip and zlib.
env.set('BUILD_BZIP2', 0)
diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py
index 75646e4cf4..8c60a31675 100644
--- a/var/spack/repos/builtin/packages/petsc/package.py
+++ b/var/spack/repos/builtin/packages/petsc/package.py
@@ -20,6 +20,7 @@ class Petsc(Package, CudaPackage, ROCmPackage):
version('main', branch='main')
+ version('3.16.2', sha256='7ab257ae150d4837ac8d3872a1d206997962578785ec2427639ceac46d131bbc')
version('3.16.1', sha256='909cf7bce7b6a0ddb2580a1ac9502aa01631ec4105c716594c1804f0ee1ea06a')
version('3.16.0', sha256='5aaad7deea127a4790c8aa95c42fd9451ab10b5d6c68b226b92d4853002f438d')
version('3.15.5', sha256='67dc31f1c1c941a0e45301ed4042628586e92e8c4e9b119695717ae782ef23a3')
@@ -164,6 +165,8 @@ class Petsc(Package, CudaPackage, ROCmPackage):
description='Activates support for openmp')
variant('hwloc', default=False,
description='Activates support for hwloc')
+ variant('kokkos', default=False,
+ description='Activates support for kokkos and kokkos-kernels')
# 3.8.0 has a build issue with MKL - so list this conflict explicitly
conflicts('^intel-mkl', when='@3.8.0')
@@ -183,6 +186,8 @@ class Petsc(Package, CudaPackage, ROCmPackage):
conflicts('+ptscotch', when='~mpi', msg=mpi_msg)
conflicts('+superlu-dist', when='~mpi', msg=mpi_msg)
conflicts('+trilinos', when='~mpi', msg=mpi_msg)
+ conflicts('+kokkos', when='~mpi', msg=mpi_msg)
+ conflicts('^openmpi~cuda', when='+cuda') # +cuda requires CUDA enabled OpenMPI
# older versions of petsc did not support mumps when +int64
conflicts('+mumps', when='@:3.12+int64')
@@ -216,6 +221,12 @@ class Petsc(Package, CudaPackage, ROCmPackage):
depends_on('hip', when='+rocm')
depends_on('hipblas', when='+rocm')
depends_on('hipsparse', when='+rocm')
+ depends_on('rocsparse', when='+rocm')
+ depends_on('rocsolver', when='+rocm')
+ depends_on('rocblas', when='+rocm')
+ depends_on('rocrand', when='+rocm')
+ depends_on('rocthrust', when='+rocm')
+ depends_on('rocprim', when='+rocm')
# Build dependencies
depends_on('python@2.6:2.8', type='build', when='@:3.10')
@@ -312,6 +323,11 @@ class Petsc(Package, CudaPackage, ROCmPackage):
depends_on('saws', when='+saws')
depends_on('libyaml', when='+libyaml')
depends_on('hwloc', when='+hwloc')
+ depends_on('kokkos', when='+kokkos')
+ depends_on('kokkos-kernels', when='+kokkos')
+ depends_on('kokkos+cuda+wrapper+cuda_lambda', when='+kokkos +cuda')
+ depends_on('kokkos-kernels+cuda', when='+kokkos +cuda')
+ depends_on('kokkos+rocm', when='+kokkos +rocm')
# Using the following tarballs
# * petsc-3.12 (and older) - includes docs
@@ -387,7 +403,6 @@ class Petsc(Package, CudaPackage, ROCmPackage):
options.append('--with-x=0')
if 'trilinos' in spec:
- options.append('--with-cxx-dialect=C++11')
if spec.satisfies('^trilinos+boost'):
options.append('--with-boost=1')
@@ -406,10 +421,12 @@ class Petsc(Package, CudaPackage, ROCmPackage):
# if not (useinc || uselib): usedir - i.e (False, False)
for library in (
('cuda', 'cuda', False, False),
- ('hip', 'hip', False, False),
+ ('hip', 'hip', True, False),
'metis',
'hypre',
'parmetis',
+ ('kokkos', 'kokkos', False, False),
+ ('kokkos-kernels', 'kokkos-kernels', False, False),
('superlu-dist', 'superlu_dist', True, True),
('scotch', 'ptscotch', True, True),
('suite-sparse:umfpack,klu,cholmod,btf,ccolamd,colamd,camd,amd, \
@@ -486,9 +503,25 @@ class Petsc(Package, CudaPackage, ROCmPackage):
else:
options.append('CUDAFLAGS=-gencode arch=compute_{0},code=sm_{0}'
.format(cuda_arch[0]))
+ if '+rocm' in spec:
+ if not spec.satisfies('amdgpu_target=none'):
+ hip_arch = spec.variants['amdgpu_target'].value
+ options.append('--with-hip-arch={0}'.format(hip_arch[0]))
+ hip_pkgs = ['hipsparse', 'hipblas', 'rocsparse', 'rocsolver', 'rocblas']
+ hip_ipkgs = hip_pkgs + ['rocthrust', 'rocprim']
+ hip_lpkgs = hip_pkgs + ['rocrand']
+ hip_inc = ''
+ hip_lib = ''
+ for pkg in hip_ipkgs:
+ hip_inc += spec[pkg].headers.include_flags + ' '
+ for pkg in hip_lpkgs:
+ hip_lib += spec[pkg].libs.joined() + ' '
+ options.append('HIPPPFLAGS=%s' % hip_inc)
+ options.append('with-hip-lib=%s -L%s -lamdhip64' %
+ (hip_lib, spec['hip'].prefix.lib))
if 'superlu-dist' in spec:
- if spec.satisfies('@3.10.3:'):
+ if spec.satisfies('@3.10.3:3.15'):
options.append('--with-cxx-dialect=C++11')
if '+mkl-pardiso' in spec:
@@ -501,10 +534,16 @@ class Petsc(Package, CudaPackage, ROCmPackage):
if '+hpddm' in spec:
options.append('--download-hpddm')
+ # revert changes by kokkos-nvcc-wrapper
+ if spec.satisfies('^kokkos+cuda+wrapper'):
+ env['MPICH_CXX'] = env['CXX']
+ env['OMPI_CXX'] = env['CXX']
+ env['MPICXX_CXX'] = env['CXX']
+
python('configure', '--prefix=%s' % prefix, *options)
# PETSc has its own way of doing parallel make.
- make('MAKE_NP=%s' % make_jobs, parallel=False)
+ make('V=1 MAKE_NP=%s' % make_jobs, parallel=False)
make("install")
if self.run_tests:
@@ -527,6 +566,11 @@ class Petsc(Package, CudaPackage, ROCmPackage):
env.unset('PETSC_ARCH')
@property
+ def archive_files(self):
+ return [join_path(self.stage.source_path, 'configure.log'),
+ join_path(self.stage.source_path, 'make.log')]
+
+ @property
def headers(self):
return find_headers('petsc', self.prefix.include, recursive=False) \
or None # return None to indicate failure
@@ -538,6 +582,7 @@ class Petsc(Package, CudaPackage, ROCmPackage):
"""Copy the build test files after the package is installed to an
install test subdirectory for use during `spack test run`."""
self.cache_extra_test_sources('src/ksp/ksp/tutorials')
+ self.cache_extra_test_sources('src/snes/tutorials')
def test(self):
# solve Poisson equation in 2D to make sure nothing is broken:
@@ -578,3 +623,12 @@ class Petsc(Package, CudaPackage, ROCmPackage):
'-use_gpu_aware_mpi', '0']
self.run_test(runexe, runopt + testexe)
make('clean', parallel=False)
+ w_dir = join_path(self.install_test_root, 'src/snes/tutorials')
+ with working_dir(w_dir):
+ if '+kokkos' in spec:
+ make('ex3k', parallel=False)
+ testexe = ['ex3k', '-view_initial', '-dm_vec_type', 'kokkos',
+ '-dm_mat_type', 'aijkokkos', '-use_gpu_aware_mpi', '0',
+ '-snes_monitor']
+ self.run_test(runexe, runopt + testexe)
+ make('clean', parallel=False)
diff --git a/var/spack/repos/builtin/packages/pgplot/g77_gcc.conf.patch b/var/spack/repos/builtin/packages/pgplot/g77_gcc.conf.patch
index f97ed8ab49..970527afe8 100644
--- a/var/spack/repos/builtin/packages/pgplot/g77_gcc.conf.patch
+++ b/var/spack/repos/builtin/packages/pgplot/g77_gcc.conf.patch
@@ -78,7 +78,7 @@
# Mandatory.
# On systems that have a ranlib utility, put "ranlib" here. On other
-@@ -108,7 +108,7 @@
+@@ -108,16 +108,16 @@
# Optional: Needed if SHARED_LIB is set.
# How to create a shared library from a trailing list of object files.
@@ -87,3 +87,13 @@
# Optional:
# On systems such as Solaris 2.x, that allow specification of the
+ # libraries that a shared library needs to be linked with when a
+ # program that uses it is run, this variable should contain the
+ # library-specification flags used to specify these libraries to
+ # $SHARED_LD
+
+- SHARED_LIB_LIBS=""
++ SHARED_LIB_LIBS="@SHARED_LIB_LIBS@"
+
+ # Optional:
+ # Compiler name used on Next systems to compile objective-C files.
diff --git a/var/spack/repos/builtin/packages/pgplot/package.py b/var/spack/repos/builtin/packages/pgplot/package.py
index a042fe2159..92fc29f591 100644
--- a/var/spack/repos/builtin/packages/pgplot/package.py
+++ b/var/spack/repos/builtin/packages/pgplot/package.py
@@ -7,7 +7,7 @@ from spack import *
class Pgplot(MakefilePackage):
- """PGPLOT Graphics Subroutine Library
+ """PGPLOT Graphics Subroutine Library.
The PGPLOT Graphics Subroutine Library is a Fortran- or
C-callable, device-independent graphics package for making
@@ -51,6 +51,12 @@ class Pgplot(MakefilePackage):
def edit(self, spec, prefix):
+ libs = ''
+ if '+X' in spec:
+ libs += ' ' + self.spec['X11'].libs.ld_flags
+ if '+png' in spec:
+ libs += ' ' + self.spec['libpng'].libs.ld_flags
+
if spec.satisfies('%gcc'):
fib = " -fallow-invalid-boz" if spec.satisfies('%gcc@10:') else ""
@@ -61,9 +67,10 @@ class Pgplot(MakefilePackage):
'@CFLAGD@': "-O2",
'@FCOMPL@': spack_fc,
'@FFLAGC@': "-Wall -fPIC -O -ffixed-line-length-none" + fib,
- '@FFLAGD@': "-fno-backslash",
- '@LIBS@': "-lgfortran",
- '@SHARED_LD@': spack_cc + " -shared -o $SHARED_LIB -lgfortran"
+ '@FFLAGD@': libs + " -fno-backslash",
+ '@LIBS@': libs + " -lgfortran",
+ '@SHARED_LD@': spack_cc + " -shared -o $SHARED_LIB",
+ '@SHARED_LIB_LIBS@': libs + " -lgfortran",
}
elif spec.satisfies('%intel'):
sub = {
@@ -72,9 +79,10 @@ class Pgplot(MakefilePackage):
'@CFLAGD@': "-O2 -lifcore -lifport",
'@FCOMPL@': spack_fc,
'@FFLAGC@': "-fPIC",
- '@FFLAGD@': "-nofor-main",
- '@LIBS@': "-nofor-main -lifcore -lifport",
- '@SHARED_LD@': spack_cc + " -shared -o $SHARED_LIB"
+ '@FFLAGD@': libs + " -nofor-main",
+ '@LIBS@': libs + " -nofor-main -lifcore -lifport",
+ '@SHARED_LD@': spack_cc + " -shared -o $SHARED_LIB",
+ '@SHARED_LIB_LIBS@': libs + " -nofor-main -lifcore -lifport",
}
conf = join_path(
@@ -91,14 +99,12 @@ class Pgplot(MakefilePackage):
enable_driver('! XWDRIV 1 /XWINDOW')
enable_driver('! XWDRIV 2 /XSERVE')
- sub['@FFLAGD@'] += ' -L{0} -lX11'.format(self.spec['libx11'].prefix.lib)
- sub['@LIBS@'] += ' -L{0} -lX11'.format(self.spec['libx11'].prefix.lib)
-
if '+png' in spec:
enable_driver('! PNDRIV 1 /PNG')
filter_file('pndriv.o : ./png.h ./pngconf.h ./zlib.h ./zconf.h',
- 'pndriv.o :', 'makemake')
+ 'pndriv.o :',
+ 'makemake')
# Alwasy enable PS and LATEX since they are not depending on other libraries.
enable_driver('! PSDRIV 1 /PS')
@@ -115,8 +121,10 @@ class Pgplot(MakefilePackage):
filter_file(key, value, conf)
def setup_build_environment(self, env):
+ if '+X' in self.spec:
+ env.append_flags('LIBS', self.spec['X11'].libs.ld_flags)
if '+png' in self.spec:
- env.set('LIBS', self.spec['libpng'].libs.ld_flags)
+ env.append_flags('LIBS', self.spec['libpng'].libs.ld_flags)
def build(self, spec, prefix):
makemake = which('./makemake')
diff --git a/var/spack/repos/builtin/packages/php/sbang.patch b/var/spack/repos/builtin/packages/php/sbang.patch
index aca17c8ff3..aca17c8ff3 100755..100644
--- a/var/spack/repos/builtin/packages/php/sbang.patch
+++ b/var/spack/repos/builtin/packages/php/sbang.patch
diff --git a/var/spack/repos/builtin/packages/phyluce/package.py b/var/spack/repos/builtin/packages/phyluce/package.py
index 5fb589f485..cd23147bf8 100644
--- a/var/spack/repos/builtin/packages/phyluce/package.py
+++ b/var/spack/repos/builtin/packages/phyluce/package.py
@@ -31,7 +31,7 @@ class Phyluce(PythonPackage):
depends_on('mafft', type='run')
depends_on('muscle', type='run')
depends_on('picard', type='run')
- depends_on('raxml+pthreads+sse', type='run')
+ depends_on('raxml+pthreads', type='run')
depends_on('samtools', type='run')
depends_on('seqtk', type='run')
depends_on('spades', type='run')
diff --git a/var/spack/repos/builtin/packages/pixz/package.py b/var/spack/repos/builtin/packages/pixz/package.py
index 4f1663f332..0b6738293a 100644
--- a/var/spack/repos/builtin/packages/pixz/package.py
+++ b/var/spack/repos/builtin/packages/pixz/package.py
@@ -12,6 +12,7 @@ class Pixz(AutotoolsPackage):
homepage = "https://www.github.com/vasi/pixz"
url = "https://github.com/vasi/pixz/releases/download/v1.0.6/pixz-1.0.6.tar.xz"
+ version('1.0.7', sha256='e5e32c6eb0bf112b98e74a5da8fb63b9f2cae71800f599d97ce540e150c8ddc5')
version('1.0.6', sha256='02c50746b134fa1b1aae41fcc314d7c6f1919b3d48bcdea01bf11769f83f72e8')
depends_on('xz')
diff --git a/var/spack/repos/builtin/packages/podman/package.py b/var/spack/repos/builtin/packages/podman/package.py
new file mode 100644
index 0000000000..f759773bff
--- /dev/null
+++ b/var/spack/repos/builtin/packages/podman/package.py
@@ -0,0 +1,81 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Podman(Package):
+ """An optionally rootless and daemonless container engine: alias docker=podman"""
+
+ homepage = 'https://podman.io'
+ url = 'https://github.com/containers/podman/archive/v3.4.2.tar.gz'
+ maintainers = ['bernhardkaindl']
+
+ version('3.4.2', sha256='b0c4f9a11eb500b1d440d5e51a6c0c632aa4ac458e2dc0362f50f999eb7fbf31')
+
+ depends_on('go', type='build')
+ depends_on('go-md2man', type='build')
+ depends_on('pkgconfig', type='build')
+ depends_on('cni-plugins', type='run')
+ depends_on('conmon', type='run')
+ depends_on('runc', type='run')
+ depends_on('slirp4netns', type='run')
+ depends_on('gpgme')
+ depends_on('libassuan')
+ depends_on('libgpg-error')
+ depends_on('libseccomp')
+
+ def patch(self):
+ defs = FileFilter('vendor/github.com/containers/common/pkg/config/default.go')
+
+ # Prepend the provided runc executable to podman's built-in runc search path
+ defs.filter(
+ '"runc": {',
+ '"runc": {' + '"{0}",'.format(self.spec['runc'].prefix.sbin.runc)
+ )
+ # Prepend the provided conmon executable to podman's built-in conmon search path
+ defs.filter(
+ r'ConmonPath = \[\]string{',
+ 'ConmonPath = []string{' +
+ '\n "{0}",'.format(self.spec['conmon'].prefix.bin.conmon)
+ )
+ # Prepend the provided cni-plugins directory to the cni-plugin search path
+ defs.filter(
+ r'DefaultCNIPluginDirs = \[\]string{',
+ 'DefaultCNIPluginDirs = []string{' +
+ '\n "{0}",'.format(self.spec['cni-plugins'].prefix.bin)
+ )
+ # Set the default path for slirp4netns to the provided slirp4netns executable
+ defs.filter(
+ 'cniConfig := _cniConfigDir',
+ 'cniConfig := _cniConfigDir' +
+ '\n defaultEngineConfig.NetworkCmdPath = "{0}"'.format(
+ self.spec['slirp4netns'].prefix.bin.slirp4netns
+ )
+ )
+ # Use the podman install prefix as fallback path for finding container.conf
+ filter_file(
+ r'/usr',
+ self.prefix,
+ 'vendor/github.com/containers/common/pkg/config/config.go',
+ )
+
+ def install(self, spec, prefix):
+ # Set default policy.json to be located in the install prefix (documented)
+ env['EXTRA_LDFLAGS'] = (
+ '-X github.com/containers/image/v5/signature.systemDefaultPolicyPath=' +
+ prefix + '/etc/containers/policy.json'
+ )
+ # Build and installation needs to be in two separate make calls
+ # The devicemapper and btrfs drivers are (so far) not enabled in this recipe
+ tags = 'seccomp exclude_graphdriver_devicemapper exclude_graphdriver_btrfs'
+ make('-e', 'BUILDTAGS=' + tags)
+ make('install', 'PREFIX=' + prefix)
+ # Install an initial etc/containers/policy.json (configured in prefix above)
+ mkdirp(prefix.etc.containers)
+ install('test/policy.json', prefix.etc.containers)
+ # Cleanup directory trees which are created as part of the go build process
+ remove_linked_tree(prefix.src)
+ remove_linked_tree(prefix.pkg)
diff --git a/var/spack/repos/builtin/packages/poppler/poppler_page_splash.0.90.1.patch b/var/spack/repos/builtin/packages/poppler/poppler_page_splash.0.90.1.patch
index 100d026f27..100d026f27 100755..100644
--- a/var/spack/repos/builtin/packages/poppler/poppler_page_splash.0.90.1.patch
+++ b/var/spack/repos/builtin/packages/poppler/poppler_page_splash.0.90.1.patch
diff --git a/var/spack/repos/builtin/packages/poppler/poppler_page_splash.patch b/var/spack/repos/builtin/packages/poppler/poppler_page_splash.patch
index 21572d784c..21572d784c 100755..100644
--- a/var/spack/repos/builtin/packages/poppler/poppler_page_splash.patch
+++ b/var/spack/repos/builtin/packages/poppler/poppler_page_splash.patch
diff --git a/var/spack/repos/builtin/packages/portage/package.py b/var/spack/repos/builtin/packages/portage/package.py
index 331e4aed48..3ec7cca307 100644
--- a/var/spack/repos/builtin/packages/portage/package.py
+++ b/var/spack/repos/builtin/packages/portage/package.py
@@ -25,7 +25,6 @@ class Portage(CMakePackage):
variant('mpi', default=True, description='Support MPI')
variant('tangram', default=False, description='Use Tangram interface reconstruction package')
variant('jali', default=False, description='Include support for Jali mesh framework')
- variant('flecsisp', default=False, description='Include support for FleCSI mesh framework')
variant('thrust', default=False, description='Enable on-node parallelism using NVidia Thrust library')
variant('kokkos', default=False, description='Enable on-node or device parallelism with Kokkos')
variant('openmp', default=False, description="Enable on-node parallelism using OpenMP")
@@ -34,23 +33,23 @@ class Portage(CMakePackage):
depends_on("cmake@3.13:", type='build')
depends_on('mpi', when='+mpi')
+ depends_on('kokkos', when='+kokkos')
+ depends_on('thrust', when='+thrust')
+ depends_on('jali', when='+jali')
depends_on('tangram', when='+tangram')
- depends_on('tangram+mpi', when='+tangram+mpi')
- depends_on('tangram+jali', when='+tangram+jali')
- depends_on('tangram+flecsisp', when='+tangram+flecsisp')
- depends_on('tangram+thrust', when='+tangram+thrust')
- depends_on('tangram+kokkos', when='+tangram+kokkos')
- depends_on('tangram+cuda', when='+tangram+cuda')
+
+ for _variant in ['mpi', 'jali', 'openmp', 'thrust', 'kokkos', 'cuda']:
+ depends_on('tangram+' + _variant, when='+tangram+' + _variant)
+ depends_on('tangram~' + _variant, when='+tangram~' + _variant)
depends_on('wonton')
- depends_on('wonton+mpi', when='+mpi')
- depends_on('wonton+jali', when='+jali')
- depends_on('wonton+flecsisp', when='+flecsisp')
- depends_on('wonton+thrust', when='+thrust')
- depends_on('wonton+kokkos', when='+kokkos')
- depends_on('wonton+openmp', when='+openmp')
- depends_on('wonton+cuda', when='+cuda')
+ # Wonton depends array
+ wonton_variant = ['mpi', 'jali', 'openmp', 'thrust', 'kokkos', 'cuda']
+
+ for _variant in wonton_variant:
+ depends_on('wonton+' + _variant, when='+' + _variant)
+ depends_on('wonton~' + _variant, when='~' + _variant)
# Jali needs MPI
conflicts('+jali ~mpi')
@@ -84,7 +83,7 @@ class Portage(CMakePackage):
else:
options.append('-DPORTAGE_ENABLE_Jali=OFF')
- if '+flecsi' in self.spec:
+ if '+flecsisp' in self.spec:
options.append('-DPORTAGE_ENABLE_FleCSI=ON')
else:
options.append('-DPORTAGE_ENABLE_FleCSI=OFF')
@@ -103,3 +102,8 @@ class Portage(CMakePackage):
options.append('-DENABLE_APP_TESTS=OFF')
return options
+
+ def check(self):
+ if self.run_tests:
+ with working_dir(self.build_directory):
+ make("test")
diff --git a/var/spack/repos/builtin/packages/portcullis/package.py b/var/spack/repos/builtin/packages/portcullis/package.py
index 9f5b35a805..7bc2c6c62b 100644
--- a/var/spack/repos/builtin/packages/portcullis/package.py
+++ b/var/spack/repos/builtin/packages/portcullis/package.py
@@ -10,14 +10,16 @@ from spack import *
class Portcullis(AutotoolsPackage):
"""PORTable CULLing of Invalid Splice junctions"""
- homepage = "https://github.com/maplesond/portcullis"
- url = "https://github.com/maplesond/portcullis/archive/Release-1.1.2.tar.gz"
+ homepage = "https://github.com/EI-CoreBioinformatics/portcullis"
+ url = "https://github.com/EI-CoreBioinformatics/portcullis/archive/refs/tags/Release-1.1.2.tar.gz"
+ version('1.2.3', sha256='172452b5cef12a8dcc2c1c68527000743114136ee63a0dbe307ac4e2a816bc99')
version('1.1.2', sha256='5c581a7f827ffeecfe68107b7fe27ed60108325fd2f86a79d93f61b328687749')
depends_on('autoconf@2.53:', type='build')
depends_on('automake@1.11:', type='build')
depends_on('libtool@2.4.2:', type='build')
+ depends_on('boost')
depends_on('m4', type='build')
depends_on('zlib', type='build')
@@ -53,7 +55,3 @@ class Portcullis(AutotoolsPackage):
def build(self, spec, prefix):
# build manpages
make('man')
-
- # run boost build script
- sh = which('sh')
- sh('build_boost.sh')
diff --git a/var/spack/repos/builtin/packages/ppopen-appl-amr-fdm/package.py b/var/spack/repos/builtin/packages/ppopen-appl-amr-fdm/package.py
index 9e21fd66d1..9e21fd66d1 100755..100644
--- a/var/spack/repos/builtin/packages/ppopen-appl-amr-fdm/package.py
+++ b/var/spack/repos/builtin/packages/ppopen-appl-amr-fdm/package.py
diff --git a/var/spack/repos/builtin/packages/ppopen-appl-bem-at/duplicate_defs.patch b/var/spack/repos/builtin/packages/ppopen-appl-bem-at/duplicate_defs.patch
index d21d9364e5..d21d9364e5 100755..100644
--- a/var/spack/repos/builtin/packages/ppopen-appl-bem-at/duplicate_defs.patch
+++ b/var/spack/repos/builtin/packages/ppopen-appl-bem-at/duplicate_defs.patch
diff --git a/var/spack/repos/builtin/packages/ppopen-appl-dem-util/package.py b/var/spack/repos/builtin/packages/ppopen-appl-dem-util/package.py
index 508a068c05..508a068c05 100755..100644
--- a/var/spack/repos/builtin/packages/ppopen-appl-dem-util/package.py
+++ b/var/spack/repos/builtin/packages/ppopen-appl-dem-util/package.py
diff --git a/var/spack/repos/builtin/packages/ppopen-appl-fdm-at/package.py b/var/spack/repos/builtin/packages/ppopen-appl-fdm-at/package.py
index 37820a5ce6..37820a5ce6 100755..100644
--- a/var/spack/repos/builtin/packages/ppopen-appl-fdm-at/package.py
+++ b/var/spack/repos/builtin/packages/ppopen-appl-fdm-at/package.py
diff --git a/var/spack/repos/builtin/packages/ppopen-appl-fdm/gfortran_iargc.patch b/var/spack/repos/builtin/packages/ppopen-appl-fdm/gfortran_iargc.patch
index 115a0f0688..115a0f0688 100755..100644
--- a/var/spack/repos/builtin/packages/ppopen-appl-fdm/gfortran_iargc.patch
+++ b/var/spack/repos/builtin/packages/ppopen-appl-fdm/gfortran_iargc.patch
diff --git a/var/spack/repos/builtin/packages/ppopen-appl-fdm/package.py b/var/spack/repos/builtin/packages/ppopen-appl-fdm/package.py
index f1a4348e17..f1a4348e17 100755..100644
--- a/var/spack/repos/builtin/packages/ppopen-appl-fdm/package.py
+++ b/var/spack/repos/builtin/packages/ppopen-appl-fdm/package.py
diff --git a/var/spack/repos/builtin/packages/ppopen-appl-fdm/unused.patch b/var/spack/repos/builtin/packages/ppopen-appl-fdm/unused.patch
index 4c6e67c65d..4c6e67c65d 100755..100644
--- a/var/spack/repos/builtin/packages/ppopen-appl-fdm/unused.patch
+++ b/var/spack/repos/builtin/packages/ppopen-appl-fdm/unused.patch
diff --git a/var/spack/repos/builtin/packages/ppopen-appl-fem/gcc_struct_atomic.patch b/var/spack/repos/builtin/packages/ppopen-appl-fem/gcc_struct_atomic.patch
index cd4fc87d9d..cd4fc87d9d 100755..100644
--- a/var/spack/repos/builtin/packages/ppopen-appl-fem/gcc_struct_atomic.patch
+++ b/var/spack/repos/builtin/packages/ppopen-appl-fem/gcc_struct_atomic.patch
diff --git a/var/spack/repos/builtin/packages/py-aiohttp/package.py b/var/spack/repos/builtin/packages/py-aiohttp/package.py
index 4f63193a93..4606f42fce 100644
--- a/var/spack/repos/builtin/packages/py-aiohttp/package.py
+++ b/var/spack/repos/builtin/packages/py-aiohttp/package.py
@@ -21,7 +21,7 @@ class PyAiohttp(PythonPackage):
depends_on('python@3.6:', type=('build', 'run'), when='@3.7:')
depends_on('py-attrs@17.3.0:', type=('build', 'run'))
depends_on('py-chardet@2.0:3', type=('build', 'run'))
- depends_on('py-multidict@4.5:4', type=('build', 'run'))
+ depends_on('py-multidict@4.5:4', type=('build', 'run'), when='@:3.6.2')
depends_on('py-multidict@4.5:6', type=('build', 'run'), when='@3.6.3:')
depends_on('py-async-timeout@3.0:3', type=('build', 'run'))
depends_on('py-yarl@1.0:1', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-aiosqlite/package.py b/var/spack/repos/builtin/packages/py-aiosqlite/package.py
new file mode 100644
index 0000000000..96d96bbc30
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-aiosqlite/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyAiosqlite(PythonPackage):
+ """asyncio bridge to the standard sqlite3 module"""
+
+ homepage = "https://aiosqlite.omnilib.dev"
+ pypi = "aiosqlite/aiosqlite-0.17.0.tar.gz"
+
+ version('0.17.0', sha256='f0e6acc24bc4864149267ac82fb46dfb3be4455f99fe21df82609cc6e6baee51')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-typing-extensions@3.7.2:', type=('build', 'run'))
+ depends_on('py-flit-core@2:3', type='build')
+
+ # aiosqlite.test requires aiounittests, not yet in spack
+ import_modules = ['aiosqlite']
diff --git a/var/spack/repos/builtin/packages/py-astroid/package.py b/var/spack/repos/builtin/packages/py-astroid/package.py
index e9fc469674..9d691f0783 100644
--- a/var/spack/repos/builtin/packages/py-astroid/package.py
+++ b/var/spack/repos/builtin/packages/py-astroid/package.py
@@ -39,12 +39,12 @@ class PyAstroid(PythonPackage):
depends_on('py-lazy-object-proxy', type=('build', 'run'))
# Starting with astroid 2.3.1, astroid's dependencies were restricted
# to a given minor version, c.f. commit e1b4e11.
- depends_on('py-lazy-object-proxy@1.4.0:1.4', when='@2.3.1:', type=('build', 'run'))
+ depends_on('py-lazy-object-proxy@1.4.0:1.4', when='@2.3.1:2.7.2', type=('build', 'run'))
depends_on('py-lazy-object-proxy@1.4.0:', when='@2.7.3:', type=('build', 'run'))
depends_on('py-six', type=('build', 'run'), when='@:2.7.2')
depends_on('py-six@1.12:1', when='@2.3.3:2.7.2', type=('build', 'run'))
depends_on('py-wrapt', when='@:2.2', type=('build', 'run'))
- depends_on('py-wrapt@1.11:1.12', when='@2.3.3:', type=('build', 'run'))
+ depends_on('py-wrapt@1.11:1.12', when='@2.3.3:2.8.2', type=('build', 'run'))
depends_on('py-wrapt@1.11:1.13', when='@2.8.3:', type=('build', 'run'))
depends_on('py-enum34@1.1.3:', when='^python@:3.3', type=('build', 'run'))
depends_on('py-singledispatch', when='^python@:3.3', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-astropy/package.py b/var/spack/repos/builtin/packages/py-astropy/package.py
index cd2bcb1c70..adbc56cada 100644
--- a/var/spack/repos/builtin/packages/py-astropy/package.py
+++ b/var/spack/repos/builtin/packages/py-astropy/package.py
@@ -62,7 +62,7 @@ class PyAstropy(PythonPackage):
# System dependencies
depends_on('erfa')
depends_on('wcslib')
- depends_on('cfitsio')
+ depends_on('cfitsio@:3')
depends_on('expat')
def patch(self):
diff --git a/var/spack/repos/builtin/packages/py-automat/package.py b/var/spack/repos/builtin/packages/py-automat/package.py
new file mode 100644
index 0000000000..71c7f16a1d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-automat/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyAutomat(PythonPackage):
+ """Self-service finite-state machines for the programmer on the go."""
+
+ homepage = "https://github.com/glyph/Automat"
+ pypi = "Automat/Automat-20.2.0.tar.gz"
+
+ version('20.2.0', sha256='7979803c74610e11ef0c0d68a2942b152df52da55336e0c9d58daf1831cbdf33')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools-scm', type='build')
+ depends_on('py-m2r', type='build')
+
+ depends_on('py-attrs@19.2.0:', type=('build', 'run'))
+ depends_on('py-six', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-babel/package.py b/var/spack/repos/builtin/packages/py-babel/package.py
index 823b5f3b60..e96e3872df 100644
--- a/var/spack/repos/builtin/packages/py-babel/package.py
+++ b/var/spack/repos/builtin/packages/py-babel/package.py
@@ -14,6 +14,7 @@ class PyBabel(PythonPackage):
homepage = "https://babel.pocoo.org/en/latest/"
pypi = "Babel/Babel-2.7.0.tar.gz"
+ version('2.9.1', sha256='bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0')
version('2.7.0', sha256='e86135ae101e31e2c8ec20a4e0c5220f4eed12487d5cf3f78be7e98d3a57fc28')
version('2.6.0', sha256='8cba50f48c529ca3fa18cf81fa9403be176d374ac4d60738b839122dfaaa3d23')
version('2.4.0', sha256='8c98f5e5f8f5f088571f2c6bd88d530e331cbbcb95a7311a0db69d3dca7ec563')
diff --git a/var/spack/repos/builtin/packages/py-backports-os/package.py b/var/spack/repos/builtin/packages/py-backports-os/package.py
new file mode 100644
index 0000000000..e4f802eb57
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-backports-os/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyBackportsOs(PythonPackage):
+ """Backport of new features in Python's os module"""
+
+ homepage = "https://github.com/pjdelport/backports.os"
+ pypi = "backports.os/backports.os-0.1.1.tar.gz"
+
+ version('0.1.1', sha256='b472c4933094306ca08ec90b2a8cbb50c34f1fb2767775169a1c1650b7b74630')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools-scm', type='build')
+ depends_on('py-future', type=('build', 'run'), when='^python@:2')
diff --git a/var/spack/repos/builtin/packages/py-boost-histogram/package.py b/var/spack/repos/builtin/packages/py-boost-histogram/package.py
new file mode 100644
index 0000000000..8cf2f255ab
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-boost-histogram/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyBoostHistogram(PythonPackage):
+ """The Boost::Histogram Python wrapper."""
+
+ homepage = "https://github.com/scikit-hep/boost-histogram"
+ pypi = "boost_histogram/boost_histogram-1.2.1.tar.gz"
+
+ version('1.2.1', sha256='a27842b2f1cfecc509382da2b25b03056354696482b38ec3c0220af0fc9b7579')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools@45:', type='build')
+ depends_on('py-setuptools-scm@4.1.2:+toml', type='build')
+ depends_on('py-numpy@1.13.3:', type=('build', 'run'))
+ depends_on('py-dataclasses', type=('build', 'run'), when='^python@:3.6')
+ depends_on('py-typing-extensions', type=('build', 'run'), when='^python@:3.7')
diff --git a/var/spack/repos/builtin/packages/py-brian2/package.py b/var/spack/repos/builtin/packages/py-brian2/package.py
index 35edc02850..b9d88fd072 100644
--- a/var/spack/repos/builtin/packages/py-brian2/package.py
+++ b/var/spack/repos/builtin/packages/py-brian2/package.py
@@ -12,17 +12,18 @@ class PyBrian2(PythonPackage):
homepage = "https://www.briansimulator.org"
pypi = "Brian2/Brian2-2.2.2.1.tar.gz"
+ version('2.5.0.1', sha256='1f719b563ae38658c4c59bac5aeb06b41970c6eedc52021ddf6d9254913733d3')
version('2.4.2', sha256='7a711af40145d8c62b0bc0861d352dc64f341c3a738174d87ef9d71e50e959f2')
version('2.2.2.1', sha256='02075f66d42fd243fc5e28e1add8862709ae9fdabaffb69858e6d7f684a91525')
version('2.0.1', sha256='195d8ced0d20e9069917776948f92aa70b7457bbc6b5222b8199654402ee1153')
version('2.0rc3', sha256='05f347f5fa6b25d1ce5ec152a2407bbce033599eb6664f32f5331946eb3c7d66')
- variant('docs', default=False, description='Build the documentation')
-
depends_on('python@2.7:', type=('build', 'run'))
depends_on('python@3.6:', type=('build', 'run'), when='@2.4:')
+ depends_on('python@3.7:', type=('build', 'run'), when='@2.5:')
depends_on('py-numpy@1.10:', type=('build', 'run'))
depends_on('py-numpy@1.15:', type=('build', 'run'), when='@2.4:')
+ depends_on('py-numpy@1.17:', type=('build', 'run'), when='@2.5:')
depends_on('py-cython@0.29:', type=('build', 'run'))
depends_on('py-sympy@0.7.6:1.0,1.1.1:', type=('build', 'run'))
depends_on('py-sympy@1.2:', type=('build', 'run'), when='@2.4:')
@@ -30,9 +31,6 @@ class PyBrian2(PythonPackage):
depends_on('py-jinja2@2.7:', type=('build', 'run'))
depends_on('py-setuptools@21:', type=('build', 'run'))
depends_on('py-setuptools@24.2:', type=('build', 'run'), when='@2.4:')
- depends_on('py-sphinx@1.5:', type=('build', 'run'), when='+docs')
- depends_on('py-sphinx@1.8:', type=('build', 'run'), when='@2.4:+docs')
- depends_on('py-ipython@5:', type=('build', 'run'), when='@2.4:+docs')
def build_args(self, spec, prefix):
return ['--with-cython']
diff --git a/var/spack/repos/builtin/packages/py-build/package.py b/var/spack/repos/builtin/packages/py-build/package.py
new file mode 100644
index 0000000000..3c9a053d65
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-build/package.py
@@ -0,0 +1,26 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyBuild(PythonPackage):
+ """A simple, correct PEP517 package builder."""
+
+ homepage = "https://github.com/pypa/build"
+ pypi = "build/build-0.7.0.tar.gz"
+
+ version('0.7.0', sha256='1aaadcd69338252ade4f7ec1265e1a19184bf916d84c9b7df095f423948cb89f')
+
+ variant('virtualenv', default=False, description='Install optional virtualenv dependency')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-packaging@19:', type=('build', 'run'))
+ depends_on('py-pep517@0.9.1:', type=('build', 'run'))
+ depends_on('py-tomli@1:', type=('build', 'run'))
+ depends_on('py-colorama', when='platform=windows', type=('build', 'run'))
+ depends_on('py-importlib-metadata@0.22:', when='^python@:3.7', type=('build', 'run'))
+ depends_on('py-virtualenv@20.0.35:', when='+virtualenv', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-carputils/package.py b/var/spack/repos/builtin/packages/py-carputils/package.py
index 3b91b0c70f..e97f63bed7 100644
--- a/var/spack/repos/builtin/packages/py-carputils/package.py
+++ b/var/spack/repos/builtin/packages/py-carputils/package.py
@@ -14,6 +14,7 @@ class PyCarputils(PythonPackage):
version('master', branch='master')
# Version to use with openCARP releases
+ version('oc8.2', commit='e60f639c0f39ad71c8ae11814de1f3aa726e8352')
version('oc8.1', commit='a4210fcb0fe17226a1744ee9629f85b629decba3')
version('oc7.0', commit='4c04db61744f2fb7665594d7c810699c5c55c77c')
diff --git a/var/spack/repos/builtin/packages/py-click/package.py b/var/spack/repos/builtin/packages/py-click/package.py
index 734090db51..df1f632d2e 100644
--- a/var/spack/repos/builtin/packages/py-click/package.py
+++ b/var/spack/repos/builtin/packages/py-click/package.py
@@ -12,6 +12,7 @@ class PyClick(PythonPackage):
homepage = "https://click.palletsprojects.com"
pypi = "click/click-7.1.2.tar.gz"
+ version('8.0.3', sha256='410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b')
version('8.0.1', sha256='8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a')
version('7.1.2', sha256='d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a')
version('7.0', sha256='5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7',
diff --git a/var/spack/repos/builtin/packages/py-climate/package.py b/var/spack/repos/builtin/packages/py-climate/package.py
new file mode 100644
index 0000000000..f207537f94
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-climate/package.py
@@ -0,0 +1,27 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyClimate(PythonPackage):
+ """Command line arguments parsing"""
+
+ homepage = "https://pypi.org/project/climate/"
+    url = "https://pypi.io/packages/py3/c/climate/climate-0.1.0-py3-none-any.whl"
+
+ version('0.1.0', sha256='01026c764b34d8204b8f527a730ef667fa5827fca765993ff1ed3e9dab2c11ae', expand=False)
+
+ depends_on('python@3.7:3', type=('build', 'run'))
+ depends_on('py-wheel', type='build')
+ depends_on('py-pip', type='build')
+
+ phases = ['install']
+
+ # copied from py-azureml-core
+ def install(self, spec, prefix):
+ pip = which('pip')
+ pip('install', '--no-deps', self.stage.archive_file,
+ '--prefix={0}'.format(prefix))
diff --git a/var/spack/repos/builtin/packages/py-cmake/package.py b/var/spack/repos/builtin/packages/py-cmake/package.py
index fab0dda083..7425eb116a 100644
--- a/var/spack/repos/builtin/packages/py-cmake/package.py
+++ b/var/spack/repos/builtin/packages/py-cmake/package.py
@@ -14,7 +14,14 @@ class PyCmake(PythonPackage):
homepage = "https://cmake.org/"
pypi = "cmake/cmake-3.18.0.tar.gz"
+ version('3.21.4', sha256='30fa5ed8a5ad66dcd263adb87f3ce3dc2d0ec0ac3958f5becff577e4b62cd065')
version('3.18.0', sha256='52b98c5ee70b5fa30a8623e96482227e065292f78794eb085fdf0fecb204b79b')
+ depends_on('cmake@3.21.4', type=('build', 'link', 'run'), when='@3.21.4')
depends_on('cmake@3.18.0', type=('build', 'link', 'run'), when='@3.18.0')
depends_on('py-scikit-build', type='build')
+
+ def build_args(self, spec, prefix):
+ args = []
+ args.append('-DBUILD_CMAKE_FROM_SOURCE=OFF')
+ return args
diff --git a/var/spack/repos/builtin/packages/py-cmsml/package.py b/var/spack/repos/builtin/packages/py-cmsml/package.py
new file mode 100644
index 0000000000..39d9da4326
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-cmsml/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyCmsml(PythonPackage):
+ """Python package of the CMS Machine Learning Group."""
+
+ homepage = "https://github.com/cms-ml/cmsml"
+ pypi = "cmsml/cmsml-0.1.2.tar.gz"
+
+ version('0.1.2', sha256='2e2e114323441757a64e1c24179fc6295e7bd14920b7a9c3c37128eb40ad9ceb')
+
+ depends_on('python@2.7:2.7,3.6:3', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-six@1.13:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-configspace/package.py b/var/spack/repos/builtin/packages/py-configspace/package.py
new file mode 100644
index 0000000000..7e24fe0a89
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-configspace/package.py
@@ -0,0 +1,23 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyConfigspace(PythonPackage):
+ """Creation and manipulation of parameter configuration spaces for
+ automated algorithm configuration and hyperparameter tuning."""
+
+ maintainers = ['Kerilk']
+
+ homepage = "https://automl.github.io/ConfigSpace/master/"
+ pypi = "ConfigSpace/ConfigSpace-0.4.20.tar.gz"
+
+ version('0.4.20', sha256='2e4ca06f5a6a61e5322a73dd7545468c79f2a3e8385cab92fdada317af41d9e9')
+
+ depends_on('python@3.7:', type=('build', 'run'))
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-cython', type='build')
+ depends_on('py-pyparsing', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-constantly/package.py b/var/spack/repos/builtin/packages/py-constantly/package.py
new file mode 100644
index 0000000000..fe90278fc6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-constantly/package.py
@@ -0,0 +1,17 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyConstantly(PythonPackage):
+ """Symbolic constants in Python"""
+
+ homepage = "https://github.com/twisted/constantly"
+ pypi = "constantly/constantly-15.1.0.tar.gz"
+
+ version('15.1.0', sha256='586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-copulas/package.py b/var/spack/repos/builtin/packages/py-copulas/package.py
new file mode 100644
index 0000000000..fa19e4251b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-copulas/package.py
@@ -0,0 +1,28 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyCopulas(PythonPackage):
+ """Copulas is a Python library for modeling multivariate
+ distributions and sampling from them using copula
+ functions. Given a table containing numerical data, we can
+ use Copulas to learn the distribution and later on generate
+ new synthetic rows following the same statistical
+ properties."""
+
+ homepage = "https://github.com/sdv-dev/Copulas"
+ pypi = "copulas/copulas-0.6.0.tar.gz"
+
+ version('0.6.0', sha256='9de6cc738769db19794fc18e2f506a4b5ee17e6902519c0842a4698c0efb6749')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-matplotlib@3.2:3', type=('build', 'run'))
+ depends_on('py-numpy@1.18:1.19', type=('build', 'run'), when='^python@3.6')
+ depends_on('py-numpy@1.20:1', type=('build', 'run'), when='^python@3.7:')
+ depends_on('py-pandas@1.1.3:1', type=('build', 'run'))
+ depends_on('py-scipy@1.5.4:1', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-correctionlib/package.py b/var/spack/repos/builtin/packages/py-correctionlib/package.py
new file mode 100644
index 0000000000..47727d28c5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-correctionlib/package.py
@@ -0,0 +1,36 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyCorrectionlib(PythonPackage):
+ """A generic correction library"""
+
+ homepage = "https://github.com/cms-nanoAOD/correctionlib"
+ pypi = "correctionlib/correctionlib-2.0.0.tar.gz"
+
+ version('2.1.0', sha256='edf79644dc1d9d94f12b4b45366331e5da3f1e21d4cbcd3bb8b0d4b1421b0c44')
+ version('2.0.0', sha256='e4d240cbdb2633a8955ddcd02d5b9bfb33d7e1a33554d6f7957f2dec56988a67')
+
+ variant('convert', default=False,
+ description='Includes select conversion routines for common types')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools@42:', type='build')
+ depends_on('py-setuptools-scm@3.4:+toml', type='build')
+ depends_on('py-scikit-build', type='build')
+ depends_on('py-cmake@3.11:', type='build')
+ depends_on('py-make', type='build')
+ depends_on('py-pybind11@2.6.1:', type='build')
+ depends_on('py-numpy@1.13.3:', type=('build', 'run'))
+ depends_on('py-typing', type=('build', 'run'), when='^python@:3.4')
+ depends_on('py-typing-extensions', type=('build', 'run'), when='^python@:3.7')
+ depends_on('py-dataclasses', type=('build', 'run'), when='^python@:3.6')
+ depends_on('py-pydantic@1.7.3:', type=('build', 'run'))
+ depends_on('py-rich', type=('build', 'run'))
+ depends_on('py-pandas', type=('build', 'run'), when='+convert')
+ depends_on('py-uproot@4.0.4:', type=('build', 'run'), when='+convert')
+ depends_on('py-requests', type=('build', 'run'), when='+convert')
diff --git a/var/spack/repos/builtin/packages/py-coverage/package.py b/var/spack/repos/builtin/packages/py-coverage/package.py
index ac62512a3d..335306b754 100644
--- a/var/spack/repos/builtin/packages/py-coverage/package.py
+++ b/var/spack/repos/builtin/packages/py-coverage/package.py
@@ -12,7 +12,8 @@ class PyCoverage(PythonPackage):
homepage = "https://nedbatchelder.com/code/coverage/"
pypi = "coverage/coverage-4.5.4.tar.gz"
- version('5.5', sha256='ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c')
+ version('6.1.2', sha256='d9a635114b88c0ab462e0355472d00a180a5fbfd8511e7f18e4ac32652e7d972')
+ version('5.5', sha256='ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c')
version('5.3', sha256='280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0')
version('5.0.4', sha256='1b60a95fc995649464e0cd48cecc8288bac5f4198f21d04b8229dc4097d76823')
version('4.5.4', sha256='e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c')
@@ -20,6 +21,11 @@ class PyCoverage(PythonPackage):
version('4.3.4', sha256='eaaefe0f6aa33de5a65f48dd0040d7fe08cac9ac6c35a56d0a7db109c3e733df')
version('4.0a6', sha256='85c7f3efceb3724ab066a3fcccc05b9b89afcaefa5b669a7e2222d31eac4728d')
+ variant('toml', default=False, description='Enable pyproject.toml support')
+
depends_on('python@2.6:2.8,3.3:', type=('build', 'run'))
- depends_on('python@2.7:2.8,3.5:', type=('build', 'run'), when="@5.0.0:")
+ depends_on('python@2.7:2.8,3.5:', when='@5:', type=('build', 'run'))
+ depends_on('python@3.6:', when='@6:', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
+ depends_on('py-tomli', when='@6: +toml', type=('build', 'run'))
+ depends_on('py-toml', when='@:5 +toml', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-cryptography/package.py b/var/spack/repos/builtin/packages/py-cryptography/package.py
index 5c80b4aa34..81a5bcfd4b 100644
--- a/var/spack/repos/builtin/packages/py-cryptography/package.py
+++ b/var/spack/repos/builtin/packages/py-cryptography/package.py
@@ -14,13 +14,14 @@ class PyCryptography(PythonPackage):
homepage = "https://github.com/pyca/cryptography"
pypi = "cryptography/cryptography-1.8.1.tar.gz"
+ version('35.0.0', sha256='9933f28f70d0517686bd7de36166dda42094eac49415459d9bdf5e7df3e0086d')
+ version('3.4.8', sha256='94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c')
version('3.4.7', sha256='3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713')
- version('2.7', sha256='e6347742ac8f35ded4a46ff835c60e68c22a536a8ae5c4422966d06946b6d4c6')
+ version('2.7', sha256='e6347742ac8f35ded4a46ff835c60e68c22a536a8ae5c4422966d06946b6d4c6')
version('2.3.1', sha256='8d10113ca826a4c29d5b85b2c4e045ffa8bad74fb525ee0eceb1d38d4c70dfd6')
version('1.8.1', sha256='323524312bb467565ebca7e50c8ae5e9674e544951d28a2904a50012a8828190')
- variant('idna', default=False, description='Deprecated U-label support')
- conflicts('+idna', when='@:2.4,3.1:')
+ variant('idna', default=False, when='@2.5:3.0', description='Deprecated U-label support')
# dependencies taken from https://github.com/pyca/cryptography/blob/master/setup.py
depends_on('python@3.6:', when='@3.4:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-ctgan/package.py b/var/spack/repos/builtin/packages/py-ctgan/package.py
new file mode 100644
index 0000000000..f9ab396bdb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-ctgan/package.py
@@ -0,0 +1,29 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyCtgan(PythonPackage):
+ """CTGAN is a collection of Deep Learning based Synthetic
+ Data Generators for single table data, which are able to
+ learn from real data and generate synthetic clones with
+ high fidelity."""
+
+ homepage = "https://github.com/sdv-dev/CTGAN"
+ pypi = "ctgan/ctgan-0.5.0.tar.gz"
+
+ version('0.5.0', sha256='b8a5dbf21dab2d2e2690013f13feb0922f5bad13440b15bc031ce9d58c7fb988')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-packaging@20:21', type=('build', 'run'))
+ depends_on('py-numpy@1.18:1.19', type=('build', 'run'), when='^python@3.6')
+ depends_on('py-numpy@1.20:1', type=('build', 'run'), when='^python@3.7:')
+ depends_on('py-pandas@1.1.3:1', type=('build', 'run'))
+ depends_on('py-scikit-learn@0.24:1', type=('build', 'run'))
+ depends_on('py-torch@1.8.0:1', type=('build', 'run'))
+ depends_on('py-torchvision@0.9', type=('build', 'run'))
+ depends_on('py-rdt@0.6.1:0.6', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-cx-oracle/package.py b/var/spack/repos/builtin/packages/py-cx-oracle/package.py
new file mode 100644
index 0000000000..8c47892fea
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-cx-oracle/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyCxOracle(PythonPackage):
+ """Python interface to Oracle"""
+
+ homepage = "https://oracle.github.io/python-cx_Oracle"
+ pypi = "cx_Oracle/cx_Oracle-8.3.0.tar.gz"
+
+ version('8.3.0', sha256='3b2d215af4441463c97ea469b9cc307460739f89fdfa8ea222ea3518f1a424d9')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('oracle-instant-client', type='run')
+ depends_on('py-setuptools@40.6.0:', type='build')
+ depends_on('py-wheel', type='build')
diff --git a/var/spack/repos/builtin/packages/py-cycler/package.py b/var/spack/repos/builtin/packages/py-cycler/package.py
index 23b429e97e..d226efd1d4 100644
--- a/var/spack/repos/builtin/packages/py-cycler/package.py
+++ b/var/spack/repos/builtin/packages/py-cycler/package.py
@@ -10,9 +10,11 @@ class PyCycler(PythonPackage):
"""Composable style cycles."""
homepage = "https://matplotlib.org/cycler/"
- url = "https://github.com/matplotlib/cycler/archive/v0.10.0.tar.gz"
+ pypi = "cycler/cycler-0.11.0.tar.gz"
- version('0.10.0', sha256='b6d217635e03024196225367b1a438996dbbf0271bec488f00584f0e7dc15cfa')
+ version('0.11.0', sha256='9c87405839a19696e837b3b818fed3f5f69f16f1eec1a1ad77e043dcea9c772f')
+ version('0.10.0', sha256='cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8')
+ depends_on('python@3.6:', when='@0.11:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
- depends_on('py-six', type=('build', 'run'))
+ depends_on('py-six', when='@:0.10', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-cython/package.py b/var/spack/repos/builtin/packages/py-cython/package.py
index 40d31f8eaf..e1f432f544 100644
--- a/var/spack/repos/builtin/packages/py-cython/package.py
+++ b/var/spack/repos/builtin/packages/py-cython/package.py
@@ -9,9 +9,11 @@ from spack import *
class PyCython(PythonPackage):
"""The Cython compiler for writing C extensions for the Python language."""
+ homepage = "https://github.com/cython/cython"
pypi = "cython/Cython-0.29.21.tar.gz"
- version('0.29.24', sha256='cdf04d07c3600860e8c2ebaad4e8f52ac3feb212453c1764a49ac08c827e8443')
+ version('3.0.0a9', sha256='23931c45877432097cef9de2db2dc66322cbc4fc3ebbb42c476bb2c768cecff0')
+ version('0.29.24', sha256='cdf04d07c3600860e8c2ebaad4e8f52ac3feb212453c1764a49ac08c827e8443', preferred=True)
version('0.29.23', sha256='6a0d31452f0245daacb14c979c77e093eb1a546c760816b5eed0047686baad8e')
version('0.29.22', sha256='df6b83c7a6d1d967ea89a2903e4a931377634a297459652e4551734c48195406')
version('0.29.21', sha256='e57acb89bd55943c8d8bf813763d20b9099cc7165c0f16b707631a7654be9cad')
@@ -32,7 +34,8 @@ class PyCython(PythonPackage):
version('0.23.4', sha256='fec42fecee35d6cc02887f1eef4e4952c97402ed2800bfe41bbd9ed1a0730d8e')
version('0.21.2', sha256='b01af23102143515e6138a4d5e185c2cfa588e0df61c0827de4257bac3393679')
- depends_on('python@2.6:2.8,3.3:', when='@0.23:', type=('build', 'link', 'run'))
+ depends_on('python@2.7:2,3.4:', when='@3:', type=('build', 'link', 'run'))
+ depends_on('python@2.6:2,3.3:', when='@0.23:', type=('build', 'link', 'run'))
depends_on('python@:2', when='@:0.22', type=('build', 'link', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
depends_on('gdb@7.2:', type='test')
diff --git a/var/spack/repos/builtin/packages/py-debugpy/package.py b/var/spack/repos/builtin/packages/py-debugpy/package.py
index 64dee06b76..45fa0db633 100644
--- a/var/spack/repos/builtin/packages/py-debugpy/package.py
+++ b/var/spack/repos/builtin/packages/py-debugpy/package.py
@@ -18,6 +18,7 @@ class PyDebugpy(PythonPackage):
'debugpy.common'
]
+ version('1.5.1', sha256='d2b09e91fbd1efa4f4fda121d49af89501beda50c18ed7499712c71a4bf3452e')
version('1.4.1', sha256='889316de0b8ff3732927cb058cfbd3371e4cd0002ecc170d34c755ad289c867c')
depends_on('python@2.7:2.8,3.5:', type=('build', 'link', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-deepdiff/package.py b/var/spack/repos/builtin/packages/py-deepdiff/package.py
new file mode 100644
index 0000000000..48fab797b5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-deepdiff/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyDeepdiff(PythonPackage):
+ """Deep Difference and Search of any Python object/data.."""
+
+ homepage = "https://github.com/seperman/deepdiff"
+ pypi = "deepdiff/deepdiff-5.6.0.tar.gz"
+
+ version('5.6.0', sha256='e3f1c3a375c7ea5ca69dba6f7920f9368658318ff1d8a496293c79481f48e649')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-ordered-set@4.0.2', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-dh-scikit-optimize/package.py b/var/spack/repos/builtin/packages/py-dh-scikit-optimize/package.py
new file mode 100644
index 0000000000..571c9024d2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-dh-scikit-optimize/package.py
@@ -0,0 +1,38 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyDhScikitOptimize(PythonPackage):
+ """A Modified version of scikit-optimize a Sequential model-based
+ optimization toolbox for DeepHyper.
+ Scikit-Optimize, or skopt, is a simple and efficient library to
+ minimize (very) expensive and noisy black-box functions. It implements
+ several methods for sequential model-based optimization. skopt aims to
+ be accessible and easy to use in many contexts.
+
+ The library is built on top of NumPy, SciPy and Scikit-Learn."""
+
+ maintainers = ['Kerilk']
+
+ homepage = "https://github.com/deephyper/scikit-optimize"
+ pypi = "dh-scikit-optimize/dh-scikit-optimize-0.9.0.tar.gz"
+
+ version('0.9.0', sha256='fe70aa57ec5150a3d356b2184f0dda1ecc4ecb7e82d35edac3980094d409d676')
+
+ variant('plots', default=False,
+ description='Build with plot support from py-matplotlib')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-joblib@0.11:', type=('build', 'run'))
+ depends_on('py-pyaml@16.9:', type=('build', 'run'))
+ depends_on('py-numpy@1.13.3:', type=('build', 'run'))
+ depends_on('py-scipy@0.19.1:', type=('build', 'run'))
+ depends_on('py-scikit-learn@0.20.0:', type=('build', 'run'))
+ depends_on('py-configspace@0.4.20:', type=('build', 'run'))
+
+ depends_on('py-matplotlib@2.0.0:', when='+plots', type='run')
diff --git a/var/spack/repos/builtin/packages/py-distlib/package.py b/var/spack/repos/builtin/packages/py-distlib/package.py
new file mode 100644
index 0000000000..dc5ce6fd34
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-distlib/package.py
@@ -0,0 +1,15 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyDistlib(PythonPackage):
+ """Distribution utilities"""
+
+ homepage = "https://bitbucket.org/pypa/distlib"
+ pypi = "distlib/distlib-0.3.3.zip"
+
+ version('0.3.3', sha256='d982d0751ff6eaaab5e2ec8e691d949ee80eddf01a62eaa96ddb11531fe16b05')
diff --git a/var/spack/repos/builtin/packages/py-downhill/package.py b/var/spack/repos/builtin/packages/py-downhill/package.py
new file mode 100644
index 0000000000..23c164a066
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-downhill/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyDownhill(PythonPackage):
+ """Stochastic optimization routines for Theano"""
+
+    homepage = "https://github.com/lmjohns3/downhill"
+ pypi = "downhill/downhill-0.4.0.tar.gz"
+
+ version('0.4.0', sha256='074ad91deb06c05108c67d982ef71ffffb6ede2c77201abc69e332649f823b42')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-theano', type=('build', 'run'))
+ depends_on('py-click', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-envisage/package.py b/var/spack/repos/builtin/packages/py-envisage/package.py
index 7068987db6..a115726c93 100644
--- a/var/spack/repos/builtin/packages/py-envisage/package.py
+++ b/var/spack/repos/builtin/packages/py-envisage/package.py
@@ -13,13 +13,16 @@ class PyEnvisage(PythonPackage):
application consists primarily of plug-ins. In this respect, it is similar
to the Eclipse and Netbeans frameworks for Java applications."""
- homepage = "https://docs.enthought.com/envisage"
+ homepage = "https://github.com/enthought/envisage"
pypi = "envisage/envisage-4.9.2.tar.gz"
+ version('6.0.1', sha256='8864c29aa344f7ac26eeb94788798f2d0cc791dcf95c632da8d79ebc580e114c')
version('4.9.2', sha256='ed9580ac6ea17b333f1cce5b94656aed584798d56d8bd364f996a06fe1ac32eb')
- depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
+ depends_on('python@3.6:', when='@5:', type=('build', 'run'))
+ depends_on('python@2.7:2.8,3.5:', when='@:4', type=('build', 'run'))
depends_on('py-apptools', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
- depends_on('py-six', type=('build', 'run'))
+ depends_on('py-six', when='@:4', type=('build', 'run'))
+ depends_on('py-traits@6.2:', when='@6:', type=('build', 'run'))
depends_on('py-traits', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-faker/package.py b/var/spack/repos/builtin/packages/py-faker/package.py
new file mode 100644
index 0000000000..fcde7bbfcb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-faker/package.py
@@ -0,0 +1,25 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyFaker(PythonPackage):
+ """Faker is a Python package that generates fake data for
+ you. Whether you need to bootstrap your database, create
+ good-looking XML documents, fill-in your persistence to
+ stress test it, or anonymize data taken from a production
+ service, Faker is for you."""
+
+ homepage = "https://github.com/joke2k/faker"
+ pypi = "Faker/Faker-9.8.2.tar.gz"
+
+ version('9.8.2', sha256='393bd1b5becf3ccbc04a4f0f13da7e437914b24cafd1a4d8b71b5fecff54fb34')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-python-dateutil@2.4:', type=('build', 'run'))
+ depends_on('py-text-unidecode@1.3', type=('build', 'run'))
+ depends_on('py-typing-extensions@3.10.0.2:', type=('build', 'run'), when='^python@:3.8')
diff --git a/var/spack/repos/builtin/packages/py-fastjsonschema/package.py b/var/spack/repos/builtin/packages/py-fastjsonschema/package.py
new file mode 100644
index 0000000000..61b0093298
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-fastjsonschema/package.py
@@ -0,0 +1,16 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+class PyFastjsonschema(PythonPackage):
+ """Fast JSON schema validator for Python."""
+
+ homepage = "https://github.com/horejsek/python-fastjsonschema"
+ pypi = "fastjsonschema/fastjsonschema-2.15.1.tar.gz"
+
+ version('2.15.1', sha256='671f36d225b3493629b5e789428660109528f373cf4b8a22bac6fa2f8191c2d2')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-filelock/package.py b/var/spack/repos/builtin/packages/py-filelock/package.py
index ffa7a37685..9056a54bd1 100644
--- a/var/spack/repos/builtin/packages/py-filelock/package.py
+++ b/var/spack/repos/builtin/packages/py-filelock/package.py
@@ -7,14 +7,17 @@ from spack import *
class PyFilelock(PythonPackage):
- """This package contains a single module, which implements a platform
+ """A platform-independent file lock for Python.
+
+ This package contains a single module, which implements a platform
independent file lock in Python, which provides a simple way of
inter-process communication"""
homepage = "https://github.com/benediktschmitt/py-filelock"
pypi = "filelock/filelock-3.0.4.tar.gz"
- version('3.0.12', sha256='18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59')
+ version('3.4.0', sha256='93d512b32a23baf4cac44ffd72ccf70732aeff7b8050fcaf6d3ec406d954baf4')
+ version('3.0.12', sha256='18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59')
version('3.0.4', sha256='011327d4ed939693a5b28c0fdf2fd9bda1f68614c1d6d0643a89382ce9843a71')
version('3.0.3', sha256='7d8a86350736aa0efea0730e6a7f774195cbb1c2d61134c15f6be576399e87ff')
version('3.0.0', sha256='b3ad481724adfb2280773edd95ce501e497e88fa4489c6e41e637ab3fd9a456c')
@@ -25,4 +28,6 @@ class PyFilelock(PythonPackage):
version('2.0.9', sha256='0f91dce339c9f25d6f2e0733a17e4f9a47b139dffda52619a0e61e013e5c6782')
version('2.0.8', sha256='7e48e4906de3c9a5d64d8f235eb3ae1050dfefa63fd65eaf318cc915c935212b')
+ depends_on('python@3.6:', when='@3.3:', type=('build', 'run'))
+ depends_on('python@2.7:2,3.5:', when='@3.1:', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-fire/package.py b/var/spack/repos/builtin/packages/py-fire/package.py
new file mode 100644
index 0000000000..12ffc54f97
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-fire/package.py
@@ -0,0 +1,25 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack import *
+
+
+class PyFire(PythonPackage):
+ """Python Fire is a library for automatically generating command line
+ interfaces (CLIs) with a single line of code."""
+
+ homepage = "https://github.com/google/python-fire"
+ pypi = "fire/fire-0.2.1.tar.gz"
+
+ version('0.4.0', sha256='c5e2b8763699d1142393a46d0e3e790c5eb2f0706082df8f647878842c216a62')
+ version('0.3.1', sha256='9736a16227c3d469e5d2d296bce5b4d8fa8d7851e953bda327a455fc2994307f')
+ version('0.3.0', sha256='96c372096afcf33ddbadac8a7ca5b7e829e8d7157d0030bd964bf959afde5c2c')
+ version('0.2.1', sha256='6865fefc6981a713d2ce56a2a2c92c56c729269f74a6cddd6f4b94d16ae084c9')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-six', type=('build', 'run'))
+ depends_on('py-termcolor', type=('build', 'run'))
+ depends_on('py-enum34', type=('build', 'run'), when='@0.3.0: ^python@:3.3')
diff --git a/var/spack/repos/builtin/packages/py-flask/package.py b/var/spack/repos/builtin/packages/py-flask/package.py
index 53f5c6d3be..d56bacef1e 100644
--- a/var/spack/repos/builtin/packages/py-flask/package.py
+++ b/var/spack/repos/builtin/packages/py-flask/package.py
@@ -12,6 +12,7 @@ class PyFlask(PythonPackage):
homepage = "https://palletsprojects.com/p/flask/"
pypi = "Flask/Flask-1.1.1.tar.gz"
+ version('2.0.2', sha256='7b2fb8e934ddd50731893bdcdb00fc8c0315916f9fcd50d22c7cc1a95ab634e2')
version('1.1.2', sha256='4efa1ae2d7c9865af48986de8aeb8504bf32c7f3d6fdc9353d34b21f4b127060')
version('1.1.1', sha256='13f9f196f330c7c2c5d7a5cf91af894110ca0215ac051b5844701f2bfd934d52')
version('0.12.4', sha256='2ea22336f6d388b4b242bc3abf8a01244a8aa3e236e7407469ef78c16ba355dd')
@@ -19,9 +20,15 @@ class PyFlask(PythonPackage):
version('0.12.1', sha256='9dce4b6bfbb5b062181d3f7da8f727ff70c1156cbb4024351eafd426deb5fb88')
version('0.11.1', sha256='b4713f2bfb9ebc2966b8a49903ae0d3984781d5c878591cf2f7b484d28756b0e')
- depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
- depends_on('py-setuptools', type=('build', 'run'))
- depends_on('py-werkzeug@0.15:', type=('build', 'run'))
- depends_on('py-jinja2@2.10.1:', type=('build', 'run'))
+ depends_on('python@3.6:', when='@2:', type=('build', 'run'))
+ depends_on('python@2.7:2,3.5:', type=('build', 'run'))
+ depends_on('py-setuptools', type=('build', 'run'))
+
+ depends_on('py-werkzeug@2:', when='@2:', type=('build', 'run'))
+ depends_on('py-werkzeug@0.15:', type=('build', 'run'))
+ depends_on('py-jinja2@3:', when='@2:', type=('build', 'run'))
+ depends_on('py-jinja2@2.10.1:', type=('build', 'run'))
+ depends_on('py-itsdangerous@2:', when='@2:', type=('build', 'run'))
depends_on('py-itsdangerous@0.24:', type=('build', 'run'))
- depends_on('py-click@5.1:', type=('build', 'run'))
+ depends_on('py-click@7.1.2:', when='@2:', type=('build', 'run'))
+ depends_on('py-click@5.1:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-flawfinder/package.py b/var/spack/repos/builtin/packages/py-flawfinder/package.py
new file mode 100644
index 0000000000..ceb7f35a54
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-flawfinder/package.py
@@ -0,0 +1,18 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyFlawfinder(PythonPackage, SourceforgePackage):
+ """a program that examines source code looking for security weaknesses"""
+
+ homepage = "http://dwheeler.com/flawfinder/"
+ sourceforge_mirror_path = "project/flawfinder/flawfinder-2.0.19.tar.gz"
+
+ version('2.0.19', sha256='fe550981d370abfa0a29671346cc0b038229a9bd90b239eab0f01f12212df618')
+
+ depends_on('python@2.7:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-fonttools/package.py b/var/spack/repos/builtin/packages/py-fonttools/package.py
new file mode 100644
index 0000000000..5937181a9e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-fonttools/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyFonttools(PythonPackage):
+ """fontTools is a library for manipulating fonts, written in Python.
+
+ The project includes the TTX tool, that can convert TrueType and OpenType fonts to
+ and from an XML text format, which is also called TTX. It supports TrueType,
+ OpenType, AFM and to an extent Type 1 and some Mac-specific formats."""
+
+ homepage = "https://github.com/fonttools/fonttools"
+ pypi = "fonttools/fonttools-4.28.1.zip"
+
+ version('4.28.1', sha256='8c8f84131bf04f3b1dcf99b9763cec35c347164ab6ad006e18d2f99fcab05529')
+
+ depends_on('python@3.7:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-formulaic/package.py b/var/spack/repos/builtin/packages/py-formulaic/package.py
new file mode 100644
index 0000000000..6f183828db
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-formulaic/package.py
@@ -0,0 +1,26 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyFormulaic(PythonPackage):
+ """Formulaic is a high-performance implementation of Wilkinson formulas
+ for Python."""
+
+ homepage = "https://github.com/matthewwardrop/formulaic"
+ pypi = "formulaic/formulaic-0.2.4.tar.gz"
+
+ version('0.2.4', sha256='15b71ea8972fb451f80684203cddd49620fc9ed5c2e35f31e0874e9c41910d1a')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-setupmeta', type='build')
+ depends_on('py-astor', type=('build', 'run'))
+ depends_on('py-interface-meta@1.2:', type=('build', 'run'))
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-pandas', type=('build', 'run'))
+ depends_on('py-scipy', type=('build', 'run'))
+ depends_on('py-wrapt', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-fs/package.py b/var/spack/repos/builtin/packages/py-fs/package.py
new file mode 100644
index 0000000000..af1a4121d9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-fs/package.py
@@ -0,0 +1,25 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyFs(PythonPackage):
+ """Python's filesystem abstraction layer"""
+
+ homepage = "https://github.com/PyFilesystem/pyfilesystem2"
+ pypi = "fs/fs-2.4.14.tar.gz"
+
+ version('2.4.14', sha256='9555dc2bc58c58cac03478ac7e9f622d29fe2d20a4384c24c90ab50de2c7b36c')
+ version('0.5.4', sha256='ba2cca8773435a7c86059d57cb4b8ea30fda40f8610941f7822d1ce3ffd36197')
+
+ depends_on('py-setuptools@38.3.0:', type='build')
+ depends_on('py-setuptools@:57', type='build', when='@:0')
+ depends_on('py-appdirs@1.4.3:1.4', type=('build', 'run'))
+ depends_on('py-pytz', type=('build', 'run'))
+ depends_on('py-six@1.10:1', type=('build', 'run'))
+ depends_on('py-enum34@1.1.6:1.1', type=('build', 'run'), when='^python@:3.3')
+ depends_on('py-typing@3.6:3', type=('build', 'run'), when='^python@:3.5')
+ depends_on('py-backports-os@0.1:0', type=('build', 'run'), when='^python@:2')
diff --git a/var/spack/repos/builtin/packages/py-gast/package.py b/var/spack/repos/builtin/packages/py-gast/package.py
index 83706e5056..a6c3f41f7a 100644
--- a/var/spack/repos/builtin/packages/py-gast/package.py
+++ b/var/spack/repos/builtin/packages/py-gast/package.py
@@ -12,6 +12,7 @@ class PyGast(PythonPackage):
homepage = "https://github.com/serge-sans-paille/gast"
pypi = "gast/gast-0.3.2.tar.gz"
+ version('0.5.3', sha256='cfbea25820e653af9c7d1807f659ce0a0a9c64f2439421a7bba4f0983f532dea')
version('0.5.2', sha256='f81fcefa8b982624a31c9e4ec7761325a88a0eba60d36d1da90e47f8fe3c67f7')
version('0.5.1', sha256='b00e63584db482ffe6107b5832042bbe5c5bf856e3c7279b6e93201b3dcfcb46')
version('0.5.0', sha256='8109cbe7aa0f7bf7e4348379da05b8137ea1f059f073332c3c1cedd57db8541f')
diff --git a/var/spack/repos/builtin/packages/py-genshi/package.py b/var/spack/repos/builtin/packages/py-genshi/package.py
index 30d2efa821..1fdf64bb51 100644
--- a/var/spack/repos/builtin/packages/py-genshi/package.py
+++ b/var/spack/repos/builtin/packages/py-genshi/package.py
@@ -15,4 +15,4 @@ class PyGenshi(PythonPackage):
version('0.6.1', sha256='fed947f11dbcb6792bb7161701ec3b9804055ad68c8af0ab4f0f9b25e9a18dbd')
version('0.6', sha256='32aaf76a03f88efa04143bf80700399e6d84eead818fdd19d763fd76af972a4b')
- depends_on("py-setuptools", type='build')
+ depends_on("py-setuptools@:57", type='build')
diff --git a/var/spack/repos/builtin/packages/py-gevent/package.py b/var/spack/repos/builtin/packages/py-gevent/package.py
index 144ccf4b02..0b65a45cfd 100644
--- a/var/spack/repos/builtin/packages/py-gevent/package.py
+++ b/var/spack/repos/builtin/packages/py-gevent/package.py
@@ -12,16 +12,25 @@ class PyGevent(PythonPackage):
homepage = "https://www.gevent.org"
pypi = "gevent/gevent-1.3a2.tar.gz"
- version('1.5.0', sha256='b2814258e3b3fb32786bb73af271ad31f51e1ac01f33b37426b66cb8491b4c29')
- version('1.3a2', sha256='f7ab82697111ea233c7beeadf5240f669dfad9c4bbc89a3ec80a49e2c48a65bd')
+ version('21.8.0', sha256='43e93e1a4738c922a2416baf33f0afb0a20b22d3dba886720bc037cd02a98575')
+ version('1.5.0', sha256='b2814258e3b3fb32786bb73af271ad31f51e1ac01f33b37426b66cb8491b4c29')
+ version('1.3a2', sha256='f7ab82697111ea233c7beeadf5240f669dfad9c4bbc89a3ec80a49e2c48a65bd')
- depends_on('py-setuptools@24.2:', type='build', when='@:1.4')
- depends_on('py-setuptools@40.8:', type='build', when='@1.5:')
- depends_on('py-cython@0.27:', type='build', when='@:1.4')
- depends_on('py-cython@0.29.14:', type='build', when='@1.5:')
- depends_on('py-cffi@1.4:', type=('build', 'run'), when='@:1.4')
- depends_on('py-cffi@1.12.2:', type=('build', 'run'), when='@1.5:')
- depends_on('py-greenlet@0.4.13:', type=('build', 'run'), when='@:1.4')
- depends_on('py-greenlet@0.4.14:', type=('build', 'run'), when='@1.5:')
- depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
- depends_on('python@2.7:2.8,3.5:', type=('build', 'run'), when='@1.5:')
+ depends_on('python@2.7:2,3.6:', when='@21.8:', type=('build', 'run'))
+ depends_on('python@2.7:2,3.5:', when='@1.5:', type=('build', 'run'))
+ depends_on('python@2.7:2,3.4:', type=('build', 'run'))
+ depends_on('py-setuptools@40.8:', when='@20.5.1:', type=('build', 'run'))
+ depends_on('py-setuptools@40.8:', when='@1.5:', type='build')
+ depends_on('py-setuptools@24.2:', when='@:1.4', type='build')
+ depends_on('py-cython@3.0.0a9:', when='@20.5.1:', type='build')
+ depends_on('py-cython@0.29.14:', when='@1.5:', type='build')
+ depends_on('py-cython@0.27:', when='@:1.4', type='build')
+
+ depends_on('py-cffi@1.12.3:', when='@1.5:', type=('build', 'run')) # from pyproject.toml
+ depends_on('py-cffi@1.4:', when='@:1.4', type=('build', 'run'))
+ depends_on('py-greenlet@1.1:1', when='@21.8:', type=('build', 'run'))
+ depends_on('py-greenlet@0.4.17:1', when='@20.12:', type=('build', 'run'))
+ depends_on('py-greenlet@0.4.14:', when='@1.5:', type=('build', 'run'))
+ depends_on('py-greenlet@0.4.13:', when='@:1.4', type=('build', 'run'))
+ depends_on('py-zope-event', when='@20.5.1:', type=('build', 'run'))
+ depends_on('py-zope-interface', when='@20.5.1:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-gin-config/package.py b/var/spack/repos/builtin/packages/py-gin-config/package.py
new file mode 100644
index 0000000000..b10fa8bbb8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-gin-config/package.py
@@ -0,0 +1,18 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyGinConfig(PythonPackage):
+ """Gin provides a lightweight configuration framework for
+ Python, based on dependency injection."""
+
+ homepage = "https://github.com/google/gin-config"
+ pypi = "gin-config/gin-config-0.5.0.tar.gz"
+
+ version('0.5.0', sha256='0c6ea5026ded927c8c93c990b01c695257c1df446e45e549a158cfbc79e19ed6')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-gpy/package.py b/var/spack/repos/builtin/packages/py-gpy/package.py
index 920cd39755..c45c1674af 100644
--- a/var/spack/repos/builtin/packages/py-gpy/package.py
+++ b/var/spack/repos/builtin/packages/py-gpy/package.py
@@ -11,6 +11,7 @@ class PyGpy(PythonPackage):
homepage = "https://sheffieldml.github.io/GPy/"
pypi = "gpy/GPy-1.9.9.tar.gz"
+ maintainers = ['liuyangzhuan']
version('1.9.9', sha256='04faf0c24eacc4dea60727c50a48a07ddf9b5751a3b73c382105e2a31657c7ed')
version('0.8.8', sha256='e135d928cf170e2ec7fb058a035b5a7e334dc6b84d0bfb981556782528341988')
diff --git a/var/spack/repos/builtin/packages/py-greenlet/package.py b/var/spack/repos/builtin/packages/py-greenlet/package.py
index 43d9f197f2..eb438510e0 100644
--- a/var/spack/repos/builtin/packages/py-greenlet/package.py
+++ b/var/spack/repos/builtin/packages/py-greenlet/package.py
@@ -12,6 +12,7 @@ class PyGreenlet(PythonPackage):
homepage = "https://github.com/python-greenlet/greenlet"
pypi = "greenlet/greenlet-0.4.17.tar.gz"
+ version('1.1.2', sha256='e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a')
version('1.1.0', sha256='c87df8ae3f01ffb4483c796fe1b15232ce2b219f0b18126948616224d3f658ee')
version('0.4.17', sha256='41d8835c69a78de718e466dd0e6bfd4b46125f21a67c3ff6d76d8d8059868d6b')
version('0.4.13', sha256='0fef83d43bf87a5196c91e73cb9772f945a4caaff91242766c5916d1dd1381e4')
diff --git a/var/spack/repos/builtin/packages/py-grpcio-tools/package.py b/var/spack/repos/builtin/packages/py-grpcio-tools/package.py
new file mode 100644
index 0000000000..1ebc9b2ac9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-grpcio-tools/package.py
@@ -0,0 +1,60 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyGrpcioTools(PythonPackage):
+ """Protobuf code generator for gRPC"""
+
+ homepage = "https://grpc.io/"
+ pypi = "grpcio-tools/grpcio-tools-1.42.0.tar.gz"
+
+ version('1.42.0', sha256='d0a0daa82eb2c2fb8e12b82a458d1b7c5516fe1135551da92b1a02e2cba93422')
+ version('1.39.0', sha256='39dfe7415bc0d3860fdb8dd90607594b046b88b57dbe64284efa4820f951c805')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-protobuf@3.5.0.post1:3', type=('build', 'run'))
+ depends_on('py-grpcio@1.42.0:', type=('build', 'run'), when='@1.42.0:')
+ depends_on('py-grpcio@1.39.0:', type=('build', 'run'), when='@1.39.0:1.41')
+ depends_on('py-cython@0.23:', type='build')
+ depends_on('openssl')
+ depends_on('zlib')
+ depends_on('c-ares')
+ depends_on('re2+shared')
+
+ def setup_build_environment(self, env):
+ env.set('GRPC_PYTHON_BUILD_WITH_CYTHON', True)
+ env.set('GRPC_PYTHON_BUILD_SYSTEM_OPENSSL', True)
+ env.set('GRPC_PYTHON_BUILD_SYSTEM_ZLIB', True)
+ env.set('GRPC_PYTHON_BUILD_SYSTEM_CARES', True)
+ env.set('GRPC_PYTHON_BUILD_SYSTEM_RE2', True)
+ # https://github.com/grpc/grpc/pull/24449
+ env.set('GRPC_BUILD_WITH_BORING_SSL_ASM', '')
+ env.set('GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS', str(make_jobs))
+
+ for dep in self.spec.dependencies(deptype='link'):
+ query = self.spec[dep.name]
+ env.prepend_path('LIBRARY_PATH', query.libs.directories[0])
+ env.prepend_path('CPATH', query.headers.directories[0])
+
+ def patch(self):
+ if self.spec.satisfies('%fj'):
+ filter_file("-std=gnu99", "", "setup.py")
+
+ # use the spack packages
+ filter_file(r'(\s+SSL_INCLUDE = ).*',
+ r"\1('{0}',)".format(self.spec['openssl'].prefix.include),
+ 'setup.py')
+ filter_file(r'(\s+ZLIB_INCLUDE = ).*',
+ r"\1('{0}',)".format(self.spec['zlib'].prefix.include),
+ 'setup.py')
+ filter_file(r'(\s+CARES_INCLUDE = ).*',
+ r"\1('{0}',)".format(self.spec['c-ares'].prefix.include),
+ 'setup.py')
+ filter_file(r'(\s+RE2_INCLUDE = ).*',
+ r"\1('{0}',)".format(self.spec['re2'].prefix.include),
+ 'setup.py')
diff --git a/var/spack/repos/builtin/packages/py-grpcio/package.py b/var/spack/repos/builtin/packages/py-grpcio/package.py
index 490d1b67da..0159a69255 100644
--- a/var/spack/repos/builtin/packages/py-grpcio/package.py
+++ b/var/spack/repos/builtin/packages/py-grpcio/package.py
@@ -12,6 +12,7 @@ class PyGrpcio(PythonPackage):
homepage = "https://grpc.io/"
pypi = "grpcio/grpcio-1.32.0.tar.gz"
+ version('1.42.0', sha256='4a8f2c7490fe3696e0cdd566e2f099fb91b51bc75446125175c55581c2f7bc11')
version('1.39.0', sha256='57974361a459d6fe04c9ae0af1845974606612249f467bbd2062d963cb90f407')
version('1.38.1', sha256='1f79d8a24261e3c12ec3a6c25945ff799ae09874fd24815bc17c2dc37715ef6c')
version('1.38.0', sha256='abbf9c8c3df4d5233d5888c6cfa85c1bb68a6923749bd4dd1abc6e1e93986f17')
@@ -41,12 +42,14 @@ class PyGrpcio(PythonPackage):
depends_on('openssl')
depends_on('zlib')
depends_on('c-ares')
+ depends_on('re2+shared')
def setup_build_environment(self, env):
env.set('GRPC_PYTHON_BUILD_WITH_CYTHON', True)
env.set('GRPC_PYTHON_BUILD_SYSTEM_OPENSSL', True)
env.set('GRPC_PYTHON_BUILD_SYSTEM_ZLIB', True)
env.set('GRPC_PYTHON_BUILD_SYSTEM_CARES', True)
+ env.set('GRPC_PYTHON_BUILD_SYSTEM_RE2', True)
# https://github.com/grpc/grpc/pull/24449
env.set('GRPC_BUILD_WITH_BORING_SSL_ASM', '')
env.set('GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS', str(make_jobs))
@@ -59,3 +62,17 @@ class PyGrpcio(PythonPackage):
def patch(self):
if self.spec.satisfies('%fj'):
filter_file("-std=gnu99", "", "setup.py")
+
+ # use the spack packages
+ filter_file(r'(\s+SSL_INCLUDE = ).*',
+ r"\1('{0}',)".format(self.spec['openssl'].prefix.include),
+ 'setup.py')
+ filter_file(r'(\s+ZLIB_INCLUDE = ).*',
+ r"\1('{0}',)".format(self.spec['zlib'].prefix.include),
+ 'setup.py')
+ filter_file(r'(\s+CARES_INCLUDE = ).*',
+ r"\1('{0}',)".format(self.spec['c-ares'].prefix.include),
+ 'setup.py')
+ filter_file(r'(\s+RE2_INCLUDE = ).*',
+ r"\1('{0}',)".format(self.spec['re2'].prefix.include),
+ 'setup.py')
diff --git a/var/spack/repos/builtin/packages/py-h5py/package.py b/var/spack/repos/builtin/packages/py-h5py/package.py
index e7e4b123b0..7e99067990 100644
--- a/var/spack/repos/builtin/packages/py-h5py/package.py
+++ b/var/spack/repos/builtin/packages/py-h5py/package.py
@@ -16,6 +16,8 @@ class PyH5py(PythonPackage):
maintainers = ['bryanherman', 'takluyver']
version('master', branch='master')
+ version('3.6.0', sha256='8752d2814a92aba4e2b2a5922d2782d0029102d99caaf3c201a566bc0b40db29')
+ version('3.5.0', sha256='77c7be4001ac7d3ed80477de5b6942501d782de1bbe4886597bdfec2a7ab821f')
version('3.4.0', sha256='ee1c683d91ab010d5e85cb61e8f9e7ee0d8eab545bf3dd50a9618f1d0e8f615e')
version('3.3.0', sha256='e0dac887d779929778b3cfd13309a939359cc9e74756fc09af7c527a82797186')
version('3.2.1', sha256='89474be911bfcdb34cbf0d98b8ec48b578c27a89fdb1ae4ee7513f1ef8d9249e')
diff --git a/var/spack/repos/builtin/packages/py-h5sh/package.py b/var/spack/repos/builtin/packages/py-h5sh/package.py
index 042d595b99..5db97659fd 100644
--- a/var/spack/repos/builtin/packages/py-h5sh/package.py
+++ b/var/spack/repos/builtin/packages/py-h5sh/package.py
@@ -10,15 +10,15 @@ class PyH5sh(PythonPackage):
"""Shell-like environment for HDF5."""
homepage = "https://github.com/sethrj/h5sh"
- url = "https://github.com/sethrj/h5sh/archive/v0.1.1.tar.gz"
+ pypi = "h5sh/h5sh-0.1.1.tar.gz"
maintainers = ['sethrj']
- version('0.1.1', sha256='111989d8200d1da8e150aee637a907e524ca0f98d5005a55587cba0d94d9c4a0')
+ version('0.1.1', sha256='ccd8fed532d479d297baef044265100a4fb9cd119bce6f43270f2ee9f63a2183')
depends_on('py-setuptools', type=('build', 'run'))
- depends_on('py-h5py', type=('build', 'run'))
- depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-h5py@2.7.1:', type=('build', 'run'))
+ depends_on('py-numpy@1.15:', type=('build', 'run'))
depends_on('py-prompt-toolkit@2:', type=('build', 'run'))
depends_on('py-pygments', type=('build', 'run'))
depends_on('py-six', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-hatchet/package.py b/var/spack/repos/builtin/packages/py-hatchet/package.py
index 2d58f51acf..0e78ac9572 100644
--- a/var/spack/repos/builtin/packages/py-hatchet/package.py
+++ b/var/spack/repos/builtin/packages/py-hatchet/package.py
@@ -22,7 +22,9 @@ class PyHatchet(PythonPackage):
version('1.0.1', sha256='e5a4b455ab6bfbccbce3260673d9af8d1e4b21e19a2b6d0b6c1e1d7727613b7a')
version('1.0.0', sha256='efd218bc9152abde0a8006489a2c432742f00283a114c1eeb6d25abc10f5862d')
- depends_on('python@2.7,3:', type=('build', 'run'))
+ # https://github.com/hatchet/hatchet/issues/428
+ depends_on('python@2.7:3.8', when='@:1.3.0', type=('build', 'run'))
+ depends_on('python@2.7:', when='@1.3.1:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-matplotlib', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-hep-ml/package.py b/var/spack/repos/builtin/packages/py-hep-ml/package.py
new file mode 100644
index 0000000000..903e3e9fce
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-hep-ml/package.py
@@ -0,0 +1,23 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyHepMl(PythonPackage):
+ """Machine Learning for High Energy Physics"""
+
+ homepage = "https://github.com/arogozhnikov/hep_ml"
+ pypi = "hep_ml/hep_ml-0.7.0.tar.gz"
+
+ version('0.7.0', sha256='0402037064d78f5723106b385ad5f20df8f67cb312c57cb4ce3839c5616f328e')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy@1.9:', type=('build', 'run'))
+ depends_on('py-scipy@0.15.0:', type=('build', 'run'))
+ depends_on('py-pandas@0.14.0:', type=('build', 'run'))
+ depends_on('py-scikit-learn@0.19:', type=('build', 'run'))
+ depends_on('py-theano@1.0.2:', type=('build', 'run'))
+ depends_on('py-six', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-hepdata-lib/package.py b/var/spack/repos/builtin/packages/py-hepdata-lib/package.py
new file mode 100644
index 0000000000..5ffa2570e3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-hepdata-lib/package.py
@@ -0,0 +1,24 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyHepdataLib(PythonPackage):
+ """Library for getting your data into HEPData"""
+
+ homepage = "https://github.com/HEPData/hepdata_lib"
+ pypi = "hepdata_lib/hepdata_lib-0.9.0.tar.gz"
+
+ version('0.9.0', sha256='b7b194b8af0428f34094ac403f8794a672c82d85e33154161d6b3744cc2b9896')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('root+python', type=('build', 'run'))
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-pyyaml@4:', type=('build', 'run'))
+ depends_on('py-future', type=('build', 'run'))
+ depends_on('py-six', type=('build', 'run'))
+ depends_on('py-pytest-runner', type='build')
+ depends_on('py-pytest-cov', type='build')
diff --git a/var/spack/repos/builtin/packages/py-hist/package.py b/var/spack/repos/builtin/packages/py-hist/package.py
new file mode 100644
index 0000000000..ac9d80403d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-hist/package.py
@@ -0,0 +1,31 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyHist(PythonPackage):
+ """Hist classes and utilities"""
+
+ homepage = "https://github.com/scikit-hep/hist"
+ pypi = "hist/hist-2.5.2.tar.gz"
+
+ version('2.5.2', sha256='0bafb8b956cc041f1b26e8f5663fb8d3b8f7673f56336facb84d8cfdc30ae2cf')
+
+    variant('plot', default=False,
+ description='Add support for drawing histograms')
+
+ depends_on('python@3.7:', type=('build', 'run'))
+ depends_on('py-setuptools@45:', type='build')
+ depends_on('py-setuptools-scm@3.4:+toml', type='build')
+ depends_on('py-boost-histogram@1.2.0:1.2', type=('build', 'run'))
+ depends_on('py-histoprint@2.2.0:', type=('build', 'run'))
+ depends_on('py-numpy@1.14.5:', type=('build', 'run'))
+ depends_on('py-typing-extensions@3.7:', type=('build', 'run'), when='^python@:3.7')
+
+ depends_on('py-matplotlib@3.0:', type=('build', 'run'), when='+plot')
+ depends_on('py-scipy@1.4:', type=('build', 'run'), when='+plot')
+ depends_on('py-iminuit@2:', type=('build', 'run'), when='+plot')
+ depends_on('py-mplhep@0.2.16:', type=('build', 'run'), when='+plot')
diff --git a/var/spack/repos/builtin/packages/py-histbook/package.py b/var/spack/repos/builtin/packages/py-histbook/package.py
new file mode 100644
index 0000000000..acd3c5a78e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-histbook/package.py
@@ -0,0 +1,18 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyHistbook(PythonPackage):
+ """Versatile, high-performance histogram toolkit for Numpy."""
+
+ homepage = "https://github.com/scikit-hep/histbook"
+ pypi = "histbook/histbook-1.2.5.tar.gz"
+
+ version('1.2.5', sha256='76d1f143f8abccf5539029fbef8133db84f377fc7752ac9e7e6d19ac9a277967')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy@1.8.0:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-histogrammar/package.py b/var/spack/repos/builtin/packages/py-histogrammar/package.py
new file mode 100644
index 0000000000..f0327610cb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-histogrammar/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyHistogrammar(PythonPackage):
+ """Composable histogram primitives for distributed data reduction."""
+
+ homepage = "https://histogrammar.github.io/histogrammar-docs"
+ pypi = "histogrammar/histogrammar-1.0.25.tar.gz"
+
+ version('1.0.25', sha256='01d5f99cdb8dce8f02dd1adbfcc530a097154f3696d7778d0ed596d06d5ce432')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy@1.18.0:', type=('build', 'run'))
+ depends_on('py-tqdm', type=('build', 'run'))
+ depends_on('py-joblib@0.14.0:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-histoprint/package.py b/var/spack/repos/builtin/packages/py-histoprint/package.py
new file mode 100644
index 0000000000..a1de14692b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-histoprint/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyHistoprint(PythonPackage):
+ """Pretty print of NumPy (and other) histograms to the console"""
+
+ homepage = "https://github.com/scikit-hep/histoprint"
+ pypi = "histoprint/histoprint-2.2.0.tar.gz"
+
+ version('2.2.0', sha256='ef8b65f7926aaa989f076857b76291175245dd974804b408483091d1e28b00f6')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools@42:', type='build')
+ depends_on('py-setuptools-scm@3.4:+toml', type='build')
+ depends_on('py-click@7.0.0:', type=('build', 'run'))
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-uhi@0.2.1:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-hpbandster/package.py b/var/spack/repos/builtin/packages/py-hpbandster/package.py
new file mode 100644
index 0000000000..08703b0c6d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-hpbandster/package.py
@@ -0,0 +1,35 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+# Package automatically generated using 'pip2spack' converter
+from spack import *
+
+class PyHpbandster(PythonPackage):
+ """
+ A distributed Hyperband implementation with lots of room for improvement
+ """
+
+ homepage = "https://github.com/automl/HpBandSter"
+ pypi = 'hpbandster/hpbandster-0.7.4.tar.gz'
+ maintainers = ['liuyangzhuan']
+
+ version('0.7.4', sha256='49ffc32688155b509e62f3617b52ae15a96c9bff2c996a23df83f279106c5921')
+ version('0.7.3', sha256='dd6c255f5dfe773a7f0c5ecf580b46a406d9f691303e2f849a14f7ae08ff9f13')
+ version('0.7.2', sha256='24dd3311b14fa76ab8111062ced670ff888e7e99cad07dcc3398361689c09f90')
+ version('0.7.1', sha256='41a55c95787eccd23def00f73013fbc9efad3cdc20d9e03270c7c959643dc5ff')
+ version('0.7.0', sha256='b6a46c73cb6a62e2f2d20984087a3458cea056aef5aa0fc0cd606bdd116eed94')
+ version('0.6.1', sha256='8812743b43b228dbf38fe2d5c5ecf238c6a742d02d8bdd264a2f193b96ca3b92')
+ version('0.6.0', sha256='26e69a2f84c8d41bea2fd703f489453a3e9301dcb62f15271b16a3db4ccf225d')
+ version('0.5.6', sha256='bc8a93638adda5cc0838c836402f18b456631363aefbfdf52942e9f8c7251893')
+
+ depends_on('python@3:', type=('build', 'run'))
+ depends_on('py-configspace', type=('build', 'run'))
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-statsmodels', type=('build', 'run'))
+ depends_on('py-netifaces', type=('build', 'run'))
+ depends_on('py-scipy', type=('build', 'run'))
+ depends_on('py-serpent', type=('build', 'run'))
+ depends_on('py-pyro4', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-httpretty/package.py b/var/spack/repos/builtin/packages/py-httpretty/package.py
index 03980d7b23..8bb978f2f5 100644
--- a/var/spack/repos/builtin/packages/py-httpretty/package.py
+++ b/var/spack/repos/builtin/packages/py-httpretty/package.py
@@ -12,6 +12,7 @@ class PyHttpretty(PythonPackage):
homepage = "https://httpretty.readthedocs.io/en/latest/"
pypi = "httpretty/httpretty-1.1.3.tar.gz"
+ version('1.1.4', sha256='20de0e5dd5a18292d36d928cc3d6e52f8b2ac73daec40d41eb62dee154933b68')
version('1.1.3', sha256='229ade39175ea4324e767f29dc24e5f846fbc72bf80e1a919b2547a6574ff601')
depends_on('python@3:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-humanize/package.py b/var/spack/repos/builtin/packages/py-humanize/package.py
index 8240561773..5ad7992088 100644
--- a/var/spack/repos/builtin/packages/py-humanize/package.py
+++ b/var/spack/repos/builtin/packages/py-humanize/package.py
@@ -16,6 +16,15 @@ class PyHumanize(PythonPackage):
homepage = "https://github.com/jmoiron/humanize"
pypi = "humanize/humanize-0.5.1.tar.gz"
+ version('3.12.0', sha256='5ec1a66e230a3e31fb3f184aab9436ea13d4e37c168e0ffc345ae5bb57e58be6')
version('0.5.1', sha256='a43f57115831ac7c70de098e6ac46ac13be00d69abbf60bdcac251344785bb19')
+ depends_on('python@3.6:', when='@3:', type=('build', 'run'))
+ depends_on('python@3.5:', when='@1.1.0:', type=('build', 'run'))
+ depends_on('python@2.7:2,3.5:', when='@1.0.0', type=('build', 'run'))
+ depends_on('py-setuptools@42:', when='@3.2:', type=('build', 'run'))
+ depends_on('py-setuptools', when='@3:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools-scm+toml@3.4:', when='@3.2:', type='build')
+ depends_on('py-setuptools-scm', when='@1:', type='build')
+ depends_on('py-importlib-metadata', when='@3.12: ^python@:3.7', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-hyperlink/package.py b/var/spack/repos/builtin/packages/py-hyperlink/package.py
new file mode 100644
index 0000000000..fbaa2adf72
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-hyperlink/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyHyperlink(PythonPackage):
+ """A featureful, immutable, and correct URL for Python."""
+
+ homepage = "https://github.com/python-hyper/hyperlink"
+ pypi = "hyperlink/hyperlink-21.0.0.tar.gz"
+
+ version('21.0.0', sha256='427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b')
+
+ depends_on('python@2.6:2,3.4:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-idna@2.5:', type=('build', 'run'))
+ depends_on('py-typing', when='^python@:3.4', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-idna/package.py b/var/spack/repos/builtin/packages/py-idna/package.py
index 3cd8026234..873a26bee2 100644
--- a/var/spack/repos/builtin/packages/py-idna/package.py
+++ b/var/spack/repos/builtin/packages/py-idna/package.py
@@ -13,11 +13,14 @@ class PyIdna(PythonPackage):
homepage = "https://github.com/kjd/idna"
pypi = "idna/idna-3.2.tar.gz"
+ version('3.3', sha256='9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d')
version('3.2', sha256='467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3')
version('2.9', sha256='7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb')
version('2.8', sha256='c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407')
version('2.5', sha256='3cb5ce08046c4e3a560fc02f138d0ac63e00f8ce5901a56b32ec8b7994082aab')
- depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
- depends_on('python@3.5:', type=('build', 'run'), when='@3.2')
+ depends_on('python@3.5:', when='@3.2:', type=('build', 'run'))
+ depends_on('python@3.4:', when='@3:', type=('build', 'run'))
+ depends_on('python@2.7:2,3.4:', when='@2.8:2', type=('build', 'run'))
+ depends_on('python@2.6:', when='@:2.7', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'link'))
diff --git a/var/spack/repos/builtin/packages/py-imageio/package.py b/var/spack/repos/builtin/packages/py-imageio/package.py
index b69b0e3ae4..acd47c85c9 100644
--- a/var/spack/repos/builtin/packages/py-imageio/package.py
+++ b/var/spack/repos/builtin/packages/py-imageio/package.py
@@ -7,14 +7,17 @@ from spack import *
class PyImageio(PythonPackage):
- """ Imageio is a Python library that provides an easy interface
+ """ Python library for reading and writing image data.
+
+ Imageio is a Python library that provides an easy interface
to read and write a wide range of image data, including animated
images, video, volumetric data, and scientific formats. It is
cross-platform, runs on Python 2.7 and 3.4+, and is easy to install."""
- homepage = "https://imageio.github.io/"
+ homepage = "https://github.com/imageio/imageio"
pypi = "imageio/imageio-2.3.0.tar.gz"
+ version('2.10.3', sha256='469c59fe71c81cdc41c84f842d62dd2739a08fac8cb85f5a518a92a6227e2ed6')
version('2.9.0', sha256='52ddbaeca2dccf53ba2d6dec5676ca7bc3b2403ef8b37f7da78b7654bb3e10f0')
version('2.5.0', sha256='42e65aadfc3d57a1043615c92bdf6319b67589e49a0aae2b985b82144aceacad')
version('2.4.1', sha256='16b8077bc8a5fa7a58b3e744f7ecbb156d8c088132df31e0f4f546c98de3514a')
@@ -23,9 +26,10 @@ class PyImageio(PythonPackage):
# TODO: Add variants for plugins, and optional dependencies
# Fix for python 2 if needed.
- depends_on('py-numpy', type=('build', 'run'))
- depends_on('pil', type=('build', 'run'))
- depends_on('python@2.7:2.8,3.4:', type=('build', 'run'), when='@:2.5.0')
- depends_on('python@3.5:', type=('build', 'run'), when='@2.9.0:')
- depends_on('py-setuptools', type='build')
- depends_on('ffmpeg', type='run')
+ depends_on('python@3.5:', when='@2.9.0:', type=('build', 'run'))
+ depends_on('python@2.7:2.8,3.4:', when='@:2.5.0', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('pil@8.3.2:', when='@2.10:', type=('build', 'run'))
+ depends_on('pil', type=('build', 'run'))
+ depends_on('ffmpeg', type='run')
diff --git a/var/spack/repos/builtin/packages/py-imagesize/package.py b/var/spack/repos/builtin/packages/py-imagesize/package.py
index 78b4fe8477..d17b30ea1b 100644
--- a/var/spack/repos/builtin/packages/py-imagesize/package.py
+++ b/var/spack/repos/builtin/packages/py-imagesize/package.py
@@ -13,7 +13,9 @@ class PyImagesize(PythonPackage):
homepage = "https://github.com/shibukawa/imagesize_py"
pypi = "imagesize/imagesize-0.7.1.tar.gz"
- version('1.1.0', sha256='f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5')
+ version('1.3.0', sha256='cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d')
+ version('1.1.0', sha256='f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5')
version('0.7.1', sha256='0ab2c62b87987e3252f89d30b7cedbec12a01af9274af9ffa48108f2c13c6062')
+ depends_on('python@2.7:2,3.4:', when='@1.2:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-iminuit/package.py b/var/spack/repos/builtin/packages/py-iminuit/package.py
index 787cf7e2fe..65d0b16914 100644
--- a/var/spack/repos/builtin/packages/py-iminuit/package.py
+++ b/var/spack/repos/builtin/packages/py-iminuit/package.py
@@ -11,11 +11,15 @@ class PyIminuit(PythonPackage):
pypi = "iminuit/iminuit-1.2.tar.gz"
+ version('2.8.4', sha256='4b09189f3094896cfc68596adc95b7f1d92772e1de1424e5dc4dd81def56e8b0')
+ version('1.5.2', sha256='0b54f4d4fc3175471398b573d24616ddb8eb7d63808aa370cfc71fc1d636a1fd')
version('1.3.7', sha256='9173e52cc4a0c0bda13ebfb862f9b074dc5de345b23cb15c1150863aafd8a26c')
version('1.3.6', sha256='d79a197f305d4708a0e3e52b0a6748c1a6997360d2fbdfd09c022995a6963b5e')
version('1.2', sha256='7651105fc3f186cfb5742f075ffebcc5088bf7797d8ed124c00977eebe0d1c64')
# Required dependencies
+ depends_on('python@3.6:', type=('build', 'run'), when='@2.6.1:')
depends_on('py-setuptools', type='build')
depends_on('py-numpy', type=('build', 'run'), when='@1.3:1.3.6')
depends_on('py-numpy@1.11.3:', type=('build', 'run'), when='@1.3.7:')
+ depends_on('py-cmake', type='build', when='@2.8.4')
diff --git a/var/spack/repos/builtin/packages/py-importlib-metadata/package.py b/var/spack/repos/builtin/packages/py-importlib-metadata/package.py
index 961e2908f0..7d0bfc99bb 100644
--- a/var/spack/repos/builtin/packages/py-importlib-metadata/package.py
+++ b/var/spack/repos/builtin/packages/py-importlib-metadata/package.py
@@ -12,6 +12,7 @@ class PyImportlibMetadata(PythonPackage):
homepage = "https://importlib-metadata.readthedocs.io/"
pypi = "importlib_metadata/importlib_metadata-1.2.0.tar.gz"
+ version('4.8.2', sha256='75bdec14c397f528724c1bfd9709d660b33a4d2e77387a3358f20b848bb5e5fb')
version('4.8.1', sha256='f284b3e11256ad1e5d03ab86bb2ccd6f5339688ff17a4d797a0fe7df326f23b1')
version('4.6.1', sha256='079ada16b7fc30dfbb5d13399a5113110dab1aa7c2bc62f66af75f0b717c8cac')
version('3.10.1', sha256='c9356b657de65c53744046fa8f7358afe0714a1af7d570c00c3835c2d724a7c1')
diff --git a/var/spack/repos/builtin/packages/py-incremental/package.py b/var/spack/repos/builtin/packages/py-incremental/package.py
new file mode 100644
index 0000000000..4fb8cd26c0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-incremental/package.py
@@ -0,0 +1,17 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyIncremental(PythonPackage):
+ """A small library that versions your Python projects."""
+
+ homepage = "https://github.com/twisted/incremental"
+ pypi = "incremental/incremental-21.3.0.tar.gz"
+
+ version('21.3.0', sha256='02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-intensity-normalization/package.py b/var/spack/repos/builtin/packages/py-intensity-normalization/package.py
new file mode 100644
index 0000000000..0a941d2b28
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-intensity-normalization/package.py
@@ -0,0 +1,27 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyIntensityNormalization(PythonPackage):
+ """Normalize intensities of images from various MRI modalities"""
+
+ homepage = "https://github.com/jcreinhold/intensity-normalization"
+ pypi = "intensity-normalization/intensity-normalization-2.1.1.tar.gz"
+
+ version('2.1.1', sha256='686b86754a9a520a03f793cb15c87e945f68ede78ac0ad1b3564c5d5b7ac9486')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type=('build',))
+ depends_on('py-pytest-runner', type=('build',))
+ depends_on('py-matplotlib', type=('build', 'run'))
+ depends_on('py-nibabel', type=('build', 'run'))
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-scikit-fuzzy', type=('build', 'run'))
+ depends_on('py-scikit-image', type=('build', 'run'))
+ depends_on('py-scikit-learn', type=('build', 'run'))
+ depends_on('py-scipy', type=('build', 'run'))
+ depends_on('py-statsmodels', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-interface-meta/package.py b/var/spack/repos/builtin/packages/py-interface-meta/package.py
new file mode 100644
index 0000000000..d7b5f95d9b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-interface-meta/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyInterfaceMeta(PythonPackage):
+ """A convenient way to expose an extensible API with enforced method
+ signatures and consistent documentation."""
+
+ homepage = "https://github.com/matthewwardrop/interface_meta"
+ pypi = "interface_meta/interface_meta-1.2.4.tar.gz"
+
+ version('1.2.4', sha256='4c7725dd4b80f97b7eecfb26023e1a8a7cdbb6d6a7207a8e93f9d4bfef9ee566')
+
+ depends_on('python@3.4:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-setupmeta', type='build')
diff --git a/var/spack/repos/builtin/packages/py-ipykernel/package.py b/var/spack/repos/builtin/packages/py-ipykernel/package.py
index e68da51bf9..3d2fbfd5a1 100644
--- a/var/spack/repos/builtin/packages/py-ipykernel/package.py
+++ b/var/spack/repos/builtin/packages/py-ipykernel/package.py
@@ -13,6 +13,7 @@ class PyIpykernel(PythonPackage):
version('6.4.1', sha256='df3355e5eec23126bc89767a676c5f0abfc7f4c3497d118c592b83b316e8c0cd')
version('6.2.0', sha256='4439459f171d77f35b7f7e72dace5d7c2dd10a5c9e2c22b173ad9048fbfe7656')
version('6.0.2', sha256='7fb3e370dbb481b012b74bed4e794d2d16eb2a83930b31e6d8d030b9fdb4d5b4')
+ version('5.5.6', sha256='4ea44b90ae1f7c38987ad58ea0809562a17c2695a0499644326f334aecd369ec')
version('5.5.5', sha256='e976751336b51082a89fc2099fb7f96ef20f535837c398df6eab1283c2070884')
version('5.3.4', sha256='9b2652af1607986a1b231c62302d070bc0534f564c393a5d9d130db9abbbe89d')
version('5.1.1', sha256='f0e962052718068ad3b1d8bcc703794660858f58803c3798628817f492a8769c')
@@ -46,7 +47,7 @@ class PyIpykernel(PythonPackage):
depends_on('py-traitlets@4.1.0:', type=('build', 'run'))
depends_on('py-traitlets@4.1.0:5', when='@6:', type=('build', 'run'))
depends_on('py-jupyter-client', type=('build', 'run'))
- depends_on('py-jupyter-client@:6', when='@6.0.2:', type=('build', 'run'))
+ depends_on('py-jupyter-client@:6', when='@6.0.2:6.1', type=('build', 'run'))
depends_on('py-jupyter-client@:7', when='@6.2:', type=('build', 'run'))
depends_on('py-tornado@4.0:', when='@:4', type=('build', 'run'))
depends_on('py-tornado@4.2:', when='@5', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-itsdangerous/package.py b/var/spack/repos/builtin/packages/py-itsdangerous/package.py
index 168af1fad0..09959518d6 100644
--- a/var/spack/repos/builtin/packages/py-itsdangerous/package.py
+++ b/var/spack/repos/builtin/packages/py-itsdangerous/package.py
@@ -12,8 +12,10 @@ class PyItsdangerous(PythonPackage):
homepage = "https://github.com/mitsuhiko/itsdangerous"
pypi = "itsdangerous/itsdangerous-1.1.0.tar.gz"
+ version('2.0.1', sha256='9e724d68fc22902a1435351f84c3fb8623f303fffcc566a4cb952df8c572cff0')
version('1.1.0', sha256='321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19')
version('0.24', sha256='cbb3fcf8d3e33df861709ecaf89d9e6629cff0a217bc2848f1b41cd30d360519')
+ depends_on('python@3.6:', when='@2:', type=('build', 'run'))
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-jeepney/package.py b/var/spack/repos/builtin/packages/py-jeepney/package.py
index e4004afe8a..356f14b302 100644
--- a/var/spack/repos/builtin/packages/py-jeepney/package.py
+++ b/var/spack/repos/builtin/packages/py-jeepney/package.py
@@ -10,6 +10,7 @@ class PyJeepney(PythonPackage):
homepage = "https://gitlab.com/takluyver/jeepney"
pypi = "jeepney/jeepney-0.4.3.tar.gz"
+ version('0.7.1', sha256='fa9e232dfa0c498bd0b8a3a73b8d8a31978304dcef0515adc859d4e096f96f4f')
version('0.6.0', sha256='7d59b6622675ca9e993a6bd38de845051d315f8b0c72cca3aef733a20b648657')
version('0.4.3', sha256='3479b861cc2b6407de5188695fa1a8d57e5072d7059322469b62628869b8e36e')
diff --git a/var/spack/repos/builtin/packages/py-jinja2/package.py b/var/spack/repos/builtin/packages/py-jinja2/package.py
index 9367d18aa9..c2129d4c09 100644
--- a/var/spack/repos/builtin/packages/py-jinja2/package.py
+++ b/var/spack/repos/builtin/packages/py-jinja2/package.py
@@ -14,7 +14,8 @@ class PyJinja2(PythonPackage):
homepage = "https://palletsprojects.com/p/jinja/"
pypi = "Jinja2/Jinja2-2.10.3.tar.gz"
- version('3.0.1', sha256='703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4')
+ version('3.0.3', sha256='611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7')
+ version('3.0.1', sha256='703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4')
version('2.11.3', sha256='a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6')
version('2.10.3', sha256='9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de')
version('2.10.1', sha256='065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013')
diff --git a/var/spack/repos/builtin/packages/py-jsonpickle/package.py b/var/spack/repos/builtin/packages/py-jsonpickle/package.py
index 6bf9271360..d58a887f49 100644
--- a/var/spack/repos/builtin/packages/py-jsonpickle/package.py
+++ b/var/spack/repos/builtin/packages/py-jsonpickle/package.py
@@ -10,9 +10,11 @@ class PyJsonpickle(PythonPackage):
homepage = "https://github.com/jsonpickle/jsonpickle"
pypi = "jsonpickle/jsonpickle-1.4.1.tar.gz"
+ version('2.0.0', sha256='0be49cba80ea6f87a168aa8168d717d00c6ca07ba83df3cec32d3b30bfe6fb9a')
version('1.4.1', sha256='e8d4b7cd0bd6826001a74377df1079a76ad8bae0f909282de2554164c837c8ba')
depends_on('python@2.7:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools@42:', type='build', when='@2.0.0:')
depends_on('py-setuptools-scm@3.4.1:+toml', type='build')
depends_on("py-importlib-metadata", when="^python@:3.7", type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py b/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py
new file mode 100644
index 0000000000..3246d838cd
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyJupyterServerMathjax(PythonPackage):
+ """MathJax resources as a Jupyter Server Extension."""
+
+ homepage = "http://jupyter.org/"
+ pypi = "jupyter_server_mathjax/jupyter_server_mathjax-0.2.3.tar.gz"
+
+ version('0.2.3', sha256='564e8d1272019c6771208f577b5f9f2b3afb02b9e2bff3b34c042cef8ed84451')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-wheel', type='build')
+ depends_on('py-jupyter-packaging', type='build')
+ depends_on('py-jupyter-server@1.1:1', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-keras2onnx/package.py b/var/spack/repos/builtin/packages/py-keras2onnx/package.py
new file mode 100644
index 0000000000..60db5d7484
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-keras2onnx/package.py
@@ -0,0 +1,24 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyKeras2onnx(PythonPackage):
+ """Converts Machine Learning models to ONNX for use in Windows ML"""
+
+ homepage = "https://github.com/onnx/keras-onnx"
+
+ url = "https://github.com/onnx/keras-onnx/archive/refs/tags/v1.7.0.tar.gz"
+
+ version('1.7.0', sha256='8ec9c4e1c1f870d420934d1aa7cbc9faab80c6af366900bf95e5f48280c0d199')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-protobuf', type=('build', 'run'))
+ depends_on('py-requests', type=('build', 'run'))
+ depends_on('py-onnx', type=('build', 'run'))
+ depends_on('py-onnxconverter-common@1.7.0:', type=('build', 'run'))
+ depends_on('py-fire', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-keyring/package.py b/var/spack/repos/builtin/packages/py-keyring/package.py
index 2014acc389..325dde48aa 100644
--- a/var/spack/repos/builtin/packages/py-keyring/package.py
+++ b/var/spack/repos/builtin/packages/py-keyring/package.py
@@ -14,10 +14,14 @@ class PyKeyring(PythonPackage):
homepage = "https://github.com/jaraco/keyring"
pypi = "keyring/keyring-23.0.1.tar.gz"
+ version('23.2.1', sha256='6334aee6073db2fb1f30892697b1730105b5e9a77ce7e61fca6b435225493efe')
+ version('23.2.0', sha256='1e1970dcecde00c59ff6033d69cee3b283cd0d7cbad78b0dc4cdd15c8a28bcf8')
+ version('23.1.0', sha256='b7e0156667f5dcc73c1f63a518005cd18a4eb23fe77321194fefcc03748b21a4')
version('23.0.1', sha256='045703609dd3fccfcdb27da201684278823b72af515aedec1a8515719a038cb8')
depends_on('python@3.6:', type=('build', 'run'))
- depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools@42:', type='build')
+ depends_on('py-setuptools@56:', type='build', when='@23.1.0:')
depends_on('py-setuptools-scm@3.4.1:+toml', type='build')
depends_on('py-importlib-metadata@3.6:', type=('build', 'run'))
depends_on('py-secretstorage@3.2:', type=('build', 'run'), when='platform=linux')
diff --git a/var/spack/repos/builtin/packages/py-kornia/package.py b/var/spack/repos/builtin/packages/py-kornia/package.py
index ccc21b5c2b..b87b7ae72c 100644
--- a/var/spack/repos/builtin/packages/py-kornia/package.py
+++ b/var/spack/repos/builtin/packages/py-kornia/package.py
@@ -12,6 +12,7 @@ class PyKornia(PythonPackage):
homepage = "https://www.kornia.org/"
pypi = "kornia/kornia-0.5.10.tar.gz"
+ version('0.6.2', sha256='eea722b3ff2f227a9ef8088cdab480cd40dd91d9138649bfd92cfa668204eea9')
version('0.6.1', sha256='f638fb3309f88666545866c162f510b6d485fd8f7131d5570d4e6c0d295fdcd6')
version('0.5.10', sha256='428b4b934a2ba7360cc6cba051ed8fd96c2d0f66611fdca0834e82845f14f65d')
diff --git a/var/spack/repos/builtin/packages/py-laspy/package.py b/var/spack/repos/builtin/packages/py-laspy/package.py
new file mode 100644
index 0000000000..756ca9cc4e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-laspy/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyLaspy(PythonPackage):
+ """Native Python ASPRS LAS read/write library."""
+
+ homepage = "https://github.com/laspy/laspy"
+ pypi = "laspy/laspy-2.0.3.tar.gz"
+
+ version('2.0.3', sha256='95c6367bc3a7c1e0d8dc118ae4a6b038bf9e8ad3e60741ecb8d59c36d32f822a')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-law/package.py b/var/spack/repos/builtin/packages/py-law/package.py
new file mode 100644
index 0000000000..432f54191e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-law/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyLaw(PythonPackage):
+ """Build large-scale task workflows using luigi,
+ remote job submission, remote targets, and environment"""
+
+ homepage = "https://github.com/riga/law"
+ pypi = "law/law-0.1.6.tar.gz"
+
+ version('0.1.6', sha256='17c2c1837080590bff4d2e7228bfb418733f11b60e2bac8f589e68da78cf2ab8')
+
+ depends_on('python@2.7:2,3.3:3', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-six@1.13:', type=('build', 'run'))
+ depends_on('py-luigi@2.8.2:2', type=('build', 'run'), when='^python@:2.7')
+ depends_on('py-luigi@2.8.2:', type=('build', 'run'), when='^python@3:')
diff --git a/var/spack/repos/builtin/packages/py-lazy-object-proxy/package.py b/var/spack/repos/builtin/packages/py-lazy-object-proxy/package.py
index 2dac51e390..c5b537de91 100644
--- a/var/spack/repos/builtin/packages/py-lazy-object-proxy/package.py
+++ b/var/spack/repos/builtin/packages/py-lazy-object-proxy/package.py
@@ -12,10 +12,14 @@ class PyLazyObjectProxy(PythonPackage):
homepage = "https://github.com/ionelmc/python-lazy-object-proxy"
pypi = "lazy-object-proxy/lazy-object-proxy-1.3.1.tar.gz"
+ version('1.6.0', sha256='489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726')
version('1.4.3', sha256='f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0')
version('1.3.1', sha256='eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a')
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
+ depends_on('python@2.7:2.8,3.6:', type=('build', 'run'), when='@1.6.0:')
depends_on('py-setuptools-scm@3.3.1:', type='build', when='@1.4.0:')
+ depends_on('py-setuptools-scm@3.3.1:5', type='build', when='@1.6.0:')
depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools@30.3.0:', type='build', when='@1.6.0:')
diff --git a/var/spack/repos/builtin/packages/py-lhsmdu/package.py b/var/spack/repos/builtin/packages/py-lhsmdu/package.py
new file mode 100644
index 0000000000..2a8b61b4b8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-lhsmdu/package.py
@@ -0,0 +1,23 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+# Package automatically generated using 'pip2spack' converter
+
+
+class PyLhsmdu(PythonPackage):
+ """Latin Hypercube Sampling with Multi-Dimensional Uniformity (LHS-MDU)
+ from Deutsch and Deutsch, Latin hypercube sampling with multidimensional
+ uniformity."""
+
+ homepage = "http://github.com/sahilm89/lhsmdu"
+ pypi = 'lhsmdu/lhsmdu-1.1.tar.gz'
+ maintainers = ['liuyangzhuan']
+
+ version('1.1', sha256='4bc1df6b9cdd27bae0bff75cf1693f455ba32e4fa87ca9a932f60696607fe712')
+ version('0.1', sha256='ef462054b354cd20b10c6d80876c8fdb552a8d2e23eaf74179dc91956d68d32a')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy', type='build')
+ depends_on('py-scipy', type='build')
diff --git a/var/spack/repos/builtin/packages/py-liac-arff/package.py b/var/spack/repos/builtin/packages/py-liac-arff/package.py
new file mode 100644
index 0000000000..8492fd694c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-liac-arff/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyLiacArff(PythonPackage):
+ """The liac-arff module implements functions to read and
+ write ARFF files in Python."""
+
+ homepage = "https://github.com/renatopp/liac-arff"
+ pypi = "liac-arff/liac-arff-2.5.0.tar.gz"
+
+ version('2.5.0', sha256='3220d0af6487c5aa71b47579be7ad1d94f3849ff1e224af3bf05ad49a0b5c4da')
+
+ depends_on('python@2.7:2.999,3.5:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-cython', type='build')
diff --git a/var/spack/repos/builtin/packages/py-lizard/package.py b/var/spack/repos/builtin/packages/py-lizard/package.py
new file mode 100644
index 0000000000..bdf9e6a88e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-lizard/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyLizard(PythonPackage):
+ """A code analyzer without caring the C/C++ header files.
+ It works with Java, C/C++, JavaScript, Python, Ruby,
+ Swift, Objective C. Metrics includes cyclomatic
+ complexity number etc."""
+
+ homepage = "http://www.lizard.ws/"
+ pypi = "lizard/lizard-1.17.9.tar.gz"
+
+ version('1.17.9', sha256='76ee0e631d985bea1dd6521a03c6c2fa9dce5a2248b3d26c49890e9e085b7aed')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-lockfile/package.py b/var/spack/repos/builtin/packages/py-lockfile/package.py
index 8de746add6..4862f94b2b 100644
--- a/var/spack/repos/builtin/packages/py-lockfile/package.py
+++ b/var/spack/repos/builtin/packages/py-lockfile/package.py
@@ -18,8 +18,11 @@ class PyLockfile(PythonPackage):
possibilities it provides than as production-quality code.
"""
pypi = "lockfile/lockfile-0.10.2.tar.gz"
+ homepage = "https://launchpad.net/pylockfile"
+ version('0.12.2', sha256='6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799')
version('0.10.2', sha256='9e42252f17d1dd89ee31745e0c4fbe58862c25147eb0ef5295c9cd9bcb4ea2c1')
depends_on("py-setuptools", type='build')
depends_on("py-pbr", type='build')
+ depends_on("py-pbr@1.8:", type='build', when='@0.12.2:')
diff --git a/var/spack/repos/builtin/packages/py-lws/package.py b/var/spack/repos/builtin/packages/py-lws/package.py
new file mode 100644
index 0000000000..1bcfbd2b5e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-lws/package.py
@@ -0,0 +1,25 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack import *
+
+
+class PyLws(PythonPackage):
+ """Fast spectrogram phase recovery using Local Weighted Sums"""
+
+ homepage = "https://pypi.org/project/lws/"
+ pypi = "lws/lws-1.2.6.tar.gz"
+
+ version('1.2.6', sha256='ac94834832aadfcd53fcf4a77e1d95155063b39adbce14c733f8345bdac76e87')
+
+ depends_on('python@3:', type=('build', 'run'))
+ depends_on('py-cython', type='build')
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-scipy', type=('build', 'run'))
+
+ def setup_build_environment(self, env):
+ env.set('LWS_USE_CYTHON', 1)
diff --git a/var/spack/repos/builtin/packages/py-lxml/package.py b/var/spack/repos/builtin/packages/py-lxml/package.py
index 0a4cfe54f9..7f4cba072e 100644
--- a/var/spack/repos/builtin/packages/py-lxml/package.py
+++ b/var/spack/repos/builtin/packages/py-lxml/package.py
@@ -13,6 +13,7 @@ class PyLxml(PythonPackage):
homepage = "https://lxml.de/"
pypi = "lxml/lxml-4.6.1.tar.gz"
+ version('4.6.4', sha256='daf9bd1fee31f1c7a5928b3e1059e09a8d683ea58fb3ffc773b6c88cb8d1399c')
version('4.6.3', sha256='39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468')
version('4.6.1', sha256='c152b2e93b639d1f36ec5a8ca24cde4a8eefb2b6b83668fcd8e83a67badcb367')
version('4.5.2', sha256='cdc13a1682b2a6241080745b1953719e7fe0850b40a5c71ca574f090a1391df6')
@@ -23,8 +24,8 @@ class PyLxml(PythonPackage):
version('2.3', sha256='eea1b8d29532739c1383cb4794c5eacd6176f0972b59e8d29348335b87ff2e66')
variant('html5', default=False, description='Enable html5lib backend')
- variant('htmlsoup', default=False, description='Enable BeautifulSoup4 backend')
- variant('cssselect', default=False, description='Enable cssselect module')
+ variant('htmlsoup', default=False, description='Enable BeautifulSoup4 backend')
+ variant('cssselect', default=False, description='Enable cssselect module')
depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-lz4/package.py b/var/spack/repos/builtin/packages/py-lz4/package.py
index 57fcf26afe..f60e0eeb4a 100644
--- a/var/spack/repos/builtin/packages/py-lz4/package.py
+++ b/var/spack/repos/builtin/packages/py-lz4/package.py
@@ -12,7 +12,12 @@ class PyLz4(PythonPackage):
homepage = "https://github.com/python-lz4/python-lz4"
pypi = "lz4/lz4-3.1.0.tar.gz"
+ version('3.1.3', sha256='081ef0a3b5941cb03127f314229a1c78bd70c9c220bb3f4dd80033e707feaa18')
version('3.1.0', sha256='debe75513db3eb9e5cdcd82a329ff38374b6316ab65b848b571e0404746c1e05')
- depends_on('python@3.4:', type=('build', 'run'))
+ depends_on('python@3.5:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools-scm', type='build')
+ depends_on('py-pkgconfig', type='build')
+ depends_on('lz4@1.7.5:')
+ depends_on('py-future', when='^python@:2', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-make/package.py b/var/spack/repos/builtin/packages/py-make/package.py
new file mode 100644
index 0000000000..4ec9a68fc4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-make/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyMake(PythonPackage):
+ """Create project layout from jinja2 templates"""
+
+ homepage = "https://github.com/fholmer/make"
+ git = "https://github.com/fholmer/make.git"
+
+ version('0.1.6.post2', commit='ce2ef5834837a35dba5f2bea8866b61c8907c83a')
+ version('0.1.6', commit='c6e2615d01d8d5f58181e39d0f594fe5baae3c5f')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-jinja2', type=('build', 'run'))
+ depends_on('py-jinja2-time', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-mako/package.py b/var/spack/repos/builtin/packages/py-mako/package.py
index 8ae2da458b..1db1bf872d 100644
--- a/var/spack/repos/builtin/packages/py-mako/package.py
+++ b/var/spack/repos/builtin/packages/py-mako/package.py
@@ -12,9 +12,11 @@ class PyMako(PythonPackage):
pypi = "Mako/Mako-1.0.1.tar.gz"
+ version('1.1.5', sha256='169fa52af22a91900d852e937400e79f535496191c63712e3b9fda5a9bed6fc3')
version('1.1.4', sha256='17831f0b7087c313c0ffae2bcbbd3c1d5ba9eeac9c38f2eb7b50e8c99fe9d5ab')
version('1.0.4', sha256='fed99dbe4d0ddb27a33ee4910d8708aca9ef1fe854e668387a9ab9a90cbf9059')
version('1.0.1', sha256='45f0869febea59dab7efd256fb451c377cbb7947bef386ff0bb44627c31a8d1c')
depends_on('py-setuptools', type='build')
depends_on('py-markupsafe@0.9.2:', type=('build', 'run'))
+ depends_on('python@2.7:2,3.4:', when='@1.1.0:')
diff --git a/var/spack/repos/builtin/packages/py-markdown/package.py b/var/spack/repos/builtin/packages/py-markdown/package.py
index 5fa98f749f..6de296fc64 100644
--- a/var/spack/repos/builtin/packages/py-markdown/package.py
+++ b/var/spack/repos/builtin/packages/py-markdown/package.py
@@ -17,6 +17,7 @@ class PyMarkdown(PythonPackage):
homepage = "https://python-markdown.github.io/"
pypi = "markdown/Markdown-2.6.11.tar.gz"
+ version('3.3.4', sha256='31b5b491868dcc87d6c24b7e3d19a0d730d59d3e46f4eea6430a321bed387a49')
version('3.1.1', sha256='2e50876bcdd74517e7b71f3e7a76102050edec255b3983403f1a63e7c8a41e7a')
version('2.6.11', sha256='a856869c7ff079ad84a3e19cd87a64998350c2b94e9e08e44270faef33400f81')
version('2.6.7', sha256='daebf24846efa7ff269cfde8c41a48bb2303920c7b2c7c5e04fa82e6282d05c0')
@@ -34,5 +35,8 @@ class PyMarkdown(PythonPackage):
depends_on('python@2.7:2.8,3.2:3.4', when='@:2.6.7')
depends_on('python@2.7:2.8,3.2:3.6', when='@2.6.8:2.6.11')
depends_on('python@2.7:2.8,3.3.5:', when='@3.1.1:')
+ depends_on('python@3.6:', when='@3.3.4:')
depends_on('py-setuptools', type='build', when='@2.6.11:')
+ depends_on('py-setuptools@36.6:', type='build', when='@3.1:')
+ depends_on('py-importlib-metadata', type=('build', 'run'), when='@3.3.4: ^python@:3.7')
diff --git a/var/spack/repos/builtin/packages/py-matplotlib/matplotlibrc.patch b/var/spack/repos/builtin/packages/py-matplotlib/matplotlibrc.patch
new file mode 100644
index 0000000000..bbc6b294da
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-matplotlib/matplotlibrc.patch
@@ -0,0 +1,11 @@
+--- a/setup.py 2021-09-23 23:57:06.000000000 -0500
++++ b/setup.py 2021-11-18 14:29:50.000000000 -0600
+@@ -212,7 +212,7 @@
+ idx for idx, line in enumerate(template_lines)
+ if "#backend:" in line]
+ template_lines[backend_line_idx] = (
+- "#backend: {}".format(setupext.options["backend"])
++ "#backend: {}\n".format(setupext.options["backend"])
+ if setupext.options["backend"]
+ else "##backend: Agg")
+ path.write_text("".join(template_lines))
diff --git a/var/spack/repos/builtin/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py
index c47af8ba14..2e7f7e5869 100644
--- a/var/spack/repos/builtin/packages/py-matplotlib/package.py
+++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
import sys
@@ -24,6 +25,8 @@ class PyMatplotlib(PythonPackage):
'matplotlib.testing.jpl_units', 'pylab'
]
+ version('3.5.1', sha256='b2e9810e09c3a47b73ce9cab5a72243a1258f61e7900969097a817232246ce1c')
+ version('3.5.0', sha256='38892a254420d95594285077276162a5e9e9c30b6da08bdc2a4d53331ad9a6fa')
version('3.4.3', sha256='fc4f526dfdb31c9bd6b8ca06bf9fab663ca12f3ec9cdf4496fb44bc680140318')
version('3.4.2', sha256='d8d994cefdff9aaba45166eb3de4f5211adb4accac85cbf97137e98f26ea0219')
version('3.4.1', sha256='84d4c4f650f356678a5d658a43ca21a41fca13f9b8b00169c0b76e6a6a948908')
@@ -48,10 +51,6 @@ class PyMatplotlib(PythonPackage):
version('2.2.2', sha256='4dc7ef528aad21f22be85e95725234c5178c0f938e2228ca76640e5e84d8cde8')
version('2.0.2', sha256='0ffbc44faa34a8b1704bc108c451ecf87988f900ef7ce757b8e2e84383121ff1')
version('2.0.0', sha256='36cf0985829c1ab2b8b1dae5e2272e53ae681bf33ab8bedceed4f0565af5f813')
- version('1.5.3', sha256='a0a5dc39f785014f2088fed2c6d2d129f0444f71afbb9c44f7bdf1b14d86ebbc', deprecated=True)
- version('1.5.1', sha256='3ab8d968eac602145642d0db63dd8d67c85e9a5444ce0e2ecb2a8fedc7224d40', deprecated=True)
- version('1.4.3', sha256='61f201c6a82e89e4d9e324266203fad44f95fd8f36d8eec0d8690273e1182f75', deprecated=True)
- version('1.4.2', sha256='17a3c7154f152d8dfed1f37517c0a8c5db6ade4f6334f684989c36dab84ddb54', deprecated=True)
# https://matplotlib.org/tutorials/introductory/usage.html#backends
# From `lib/matplotlib/rcsetup.py`:
@@ -84,7 +83,7 @@ class PyMatplotlib(PythonPackage):
variant('fonts', default=False,
description='Enable support for system font detection')
- # https://matplotlib.org/users/installing.html#dependencies
+ # https://matplotlib.org/stable/devel/dependencies.html
# Required dependencies
extends('python', ignore=r'bin/nosetests.*$|bin/pbr$')
depends_on('python@2.7:2.8,3.4:', when='@:2', type=('build', 'link', 'run'))
@@ -99,12 +98,17 @@ class PyMatplotlib(PythonPackage):
depends_on('libpng@1.2:')
depends_on('py-setuptools', type=('build', 'run')) # See #3813
depends_on('py-certifi@2020.6.20:', when='@3.3.1:', type='build')
+ depends_on('py-setuptools-scm@4:', when='@3.5:', type='build')
+ depends_on('py-setuptools-scm-git-archive', when='@3.5:', type='build')
+ depends_on('py-cycler@0.10:', type=('build', 'run'))
+ depends_on('py-fonttools@4.22:', when='@3.5:', type=('build', 'run'))
+ depends_on('py-kiwisolver@1.0.1:', type=('build', 'run'), when='@2.2.0:')
depends_on('py-numpy@1.11:', type=('build', 'run'))
depends_on('py-numpy@1.15:', when='@3.3:', type=('build', 'run'))
depends_on('py-numpy@1.16:', when='@3.4:', type=('build', 'run'))
- depends_on('py-cycler@0.10:', type=('build', 'run'))
- depends_on('py-kiwisolver@1.0.1:', type=('build', 'run'), when='@2.2.0:')
- depends_on('pil@6.2.0:', when='@3.3:', type=('build', 'run'))
+ depends_on('py-numpy@1.17:', when='@3.5:', type=('build', 'run'))
+ depends_on('py-packaging', when='@3.5:', type=('build', 'run'))
+ depends_on('pil@6.2:', when='@3.3:', type=('build', 'run'))
depends_on('py-pyparsing@2.0.3,2.0.5:2.1.1,2.1.3:2.1.5,2.1.7:', type=('build', 'run'))
depends_on('py-pyparsing@2.2.1:', when='@3.4:', type=('build', 'run'))
depends_on('py-python-dateutil@2.1:', type=('build', 'run'))
@@ -149,7 +153,7 @@ class PyMatplotlib(PythonPackage):
depends_on('pkgconfig', type='build')
# Testing dependencies
- # https://matplotlib.org/devel/testing.html#requirements
+ # https://matplotlib.org/stable/devel/development_setup.html#additional-dependencies-for-testing
depends_on('py-pytest@3.6:', type='test')
depends_on('ghostscript@9.0:', type='test')
# depends_on('inkscape@:0', type='test')
@@ -160,9 +164,20 @@ class PyMatplotlib(PythonPackage):
conflicts('~image', when='@3.3:', msg='Pillow is no longer an optional dependency')
+ # https://github.com/matplotlib/matplotlib/pull/21662
+ patch('matplotlibrc.patch', when='@3.5.0')
# Patch to pick up correct freetype headers
patch('freetype-include-path.patch', when='@2.2.2:2.9.9')
+ @property
+ def config_file(self):
+ # https://github.com/matplotlib/matplotlib/pull/20871
+ return 'mplsetup.cfg' if self.spec.satisfies('@3.5:') else 'setup.cfg'
+
+ @property
+ def archive_files(self):
+ return [os.path.join(self.build_directory, self.config_file)]
+
def setup_build_environment(self, env):
include = []
library = []
@@ -183,19 +198,19 @@ class PyMatplotlib(PythonPackage):
backend = self.spec.variants['backend'].value
- with open('setup.cfg', 'w') as setup:
+ with open(self.config_file, 'w') as config:
# Default backend
- setup.write('[rc_options]\n')
- setup.write('backend = ' + backend + '\n')
+ config.write('[rc_options]\n')
+ config.write('backend = ' + backend + '\n')
# Starting with version 3.3.0, freetype is downloaded by default
# Force matplotlib to use Spack installations of freetype and qhull
- if self.version >= Version('3.3.0'):
- setup.write('[libs]\n')
- setup.write('system_freetype = True\n')
- setup.write('system_qhull = True\n')
+ if self.spec.satisfies('@3.3:'):
+ config.write('[libs]\n')
+ config.write('system_freetype = True\n')
+ config.write('system_qhull = True\n')
if self.spec.satisfies('%clang'):
- setup.write('enable_lto = False\n')
+ config.write('enable_lto = False\n')
@run_after('build')
@on_package_attributes(run_tests=True)
diff --git a/var/spack/repos/builtin/packages/py-mmcv/package.py b/var/spack/repos/builtin/packages/py-mmcv/package.py
index d8a5ac91e0..f328ec4c50 100644
--- a/var/spack/repos/builtin/packages/py-mmcv/package.py
+++ b/var/spack/repos/builtin/packages/py-mmcv/package.py
@@ -20,7 +20,7 @@ class PyMmcv(PythonPackage):
depends_on('py-addict', type=('build', 'run'))
depends_on('py-numpy@1.11.1:', type=('build', 'run'))
depends_on('py-pyyaml', type=('build', 'run'))
- depends_on('opencv+python', type=('build', 'run'))
+ depends_on('opencv+python3', type=('build', 'run'))
depends_on('py-cython', type='build')
patch('opencv_for0.5.1.patch', when='@0.5.1')
diff --git a/var/spack/repos/builtin/packages/py-more-itertools/package.py b/var/spack/repos/builtin/packages/py-more-itertools/package.py
index 2a9a49975a..5afe7c2547 100644
--- a/var/spack/repos/builtin/packages/py-more-itertools/package.py
+++ b/var/spack/repos/builtin/packages/py-more-itertools/package.py
@@ -12,12 +12,14 @@ class PyMoreItertools(PythonPackage):
homepage = "https://github.com/erikrose/more-itertools"
pypi = "more-itertools/more-itertools-7.2.0.tar.gz"
- version('7.2.0', sha256='409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832')
- version('7.0.0', sha256='c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a')
- version('5.0.0', sha256='38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4')
- version('4.3.0', sha256='c476b5d3a34e12d40130bc2f935028b5f636df8f372dc2c1c01dc19681b2039e')
- version('4.1.0', sha256='c9ce7eccdcb901a2c75d326ea134e0886abfbea5f93e91cc95de9507c0816c44')
- version('2.2', sha256='93e62e05c7ad3da1a233def6731e8285156701e3419a5fe279017c429ec67ce0')
+ version('8.11.0', sha256='0a2fd25d343c08d7e7212071820e7e7ea2f41d8fb45d6bc8a00cd6ce3b7aab88')
+ version('8.9.0', sha256='8c746e0d09871661520da4f1241ba6b908dc903839733c8203b552cffaf173bd')
+ version('7.2.0', sha256='409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832')
+ version('7.0.0', sha256='c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a')
+ version('5.0.0', sha256='38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4')
+ version('4.3.0', sha256='c476b5d3a34e12d40130bc2f935028b5f636df8f372dc2c1c01dc19681b2039e')
+ version('4.1.0', sha256='c9ce7eccdcb901a2c75d326ea134e0886abfbea5f93e91cc95de9507c0816c44')
+ version('2.2', sha256='93e62e05c7ad3da1a233def6731e8285156701e3419a5fe279017c429ec67ce0')
depends_on('python@3.5:', when='@7.1:', type=('build', 'run'))
depends_on('python@3.4:', when='@6:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-mpld3/package.py b/var/spack/repos/builtin/packages/py-mpld3/package.py
index f3ba568b5d..2217452114 100644
--- a/var/spack/repos/builtin/packages/py-mpld3/package.py
+++ b/var/spack/repos/builtin/packages/py-mpld3/package.py
@@ -13,9 +13,11 @@ class PyMpld3(PythonPackage):
homepage = "https://mpld3.github.io/"
pypi = "mpld3/mpld3-0.3.tar.gz"
+ version('0.5.5', sha256='b080f3535238a71024c0158280ab4f6091717c45347c41c907012f8dd6da1bd5')
version('0.3', sha256='4d455884a211bf99b37ecc760759435c7bb6a5955de47d8daf4967e301878ab7')
depends_on('python@2.6:2.8,3.3:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-matplotlib@1.3:', type=('build', 'run'))
+ depends_on('py-matplotlib@2.2.2:', type=('build', 'run'), when='@0.5.5:')
depends_on('py-jinja2@2.7:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-mplhep-data/package.py b/var/spack/repos/builtin/packages/py-mplhep-data/package.py
new file mode 100644
index 0000000000..9a1bc7593c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-mplhep-data/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyMplhepData(PythonPackage):
+ """Font (Data) sub-package for mplhep"""
+
+ homepage = "https://github.com/Scikit-HEP/mplhep_data"
+ pypi = "mplhep_data/mplhep_data-0.0.3.tar.gz"
+
+ version('0.0.3', sha256='b54d257f3f53c93a442cda7a6681ce267277e09173c0b41fd78820f78321772f')
+
+ depends_on('python@3.7:', type=('build', 'run'))
+ depends_on('py-setuptools@42:', type='build')
+ depends_on('py-setuptools-scm@3.4:+toml', type='build')
diff --git a/var/spack/repos/builtin/packages/py-mplhep/package.py b/var/spack/repos/builtin/packages/py-mplhep/package.py
new file mode 100644
index 0000000000..b2123010da
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-mplhep/package.py
@@ -0,0 +1,23 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyMplhep(PythonPackage):
+ """Matplotlib styles for HEP"""
+
+ homepage = "https://github.com/scikit-hep/mplhep"
+ pypi = "mplhep/mplhep-0.3.15.tar.gz"
+
+ version('0.3.15', sha256='595f796ea65930094e86a805214e0d44537ead267a7487ae16eda02d1670653e')
+
+ depends_on('python@3.7:', type=('build', 'run'))
+ depends_on('py-setuptools@39.2:', type='build')
+ depends_on('py-mplhep-data', type=('build', 'run'))
+ depends_on('py-matplotlib@3.4:', type=('build', 'run'))
+ depends_on('py-numpy@1.16.0:', type=('build', 'run'))
+ depends_on('py-packaging', type=('build', 'run'))
+ depends_on('py-uhi@0.2.0:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-mpmath/package.py b/var/spack/repos/builtin/packages/py-mpmath/package.py
index ae471b99b7..603580bdda 100644
--- a/var/spack/repos/builtin/packages/py-mpmath/package.py
+++ b/var/spack/repos/builtin/packages/py-mpmath/package.py
@@ -11,6 +11,10 @@ class PyMpmath(PythonPackage):
homepage = "https://mpmath.org"
pypi = "mpmath/mpmath-1.0.0.tar.gz"
+ version('1.2.1', sha256='79ffb45cf9f4b101a807595bcb3e72e0396202e0b1d25d689134b48c4216a81a')
version('1.1.0', sha256='fc17abe05fbab3382b61a123c398508183406fa132e0223874578e20946499f6')
version('1.0.0', sha256='04d14803b6875fe6d69e6dccea87d5ae5599802e4b1df7997bddd2024001050c')
version('0.19', sha256='68ddf6426dcda445323467d89892d2cffbbd1ae0b31ac1241b1b671749d63222')
+
+ depends_on('py-setuptools@36.7.0:', type='build', when='@1.2.0:')
+ depends_on('py-setuptools-scm@1.7.0:', type='build', when='@1.2.0:')
diff --git a/var/spack/repos/builtin/packages/py-multidict/package.py b/var/spack/repos/builtin/packages/py-multidict/package.py
index 0ea09e54c3..75db4cc988 100644
--- a/var/spack/repos/builtin/packages/py-multidict/package.py
+++ b/var/spack/repos/builtin/packages/py-multidict/package.py
@@ -11,7 +11,11 @@ class PyMultidict(PythonPackage):
homepage = "https://github.com/aio-libs/multidict"
url = "https://github.com/aio-libs/multidict/archive/v4.7.6.tar.gz"
+ version('5.2.0', sha256='70039c8d0f4883816de230619c9d4ee1b8527b3628a42783e8bc26de4fee1154')
+ version('5.1.0', sha256='1798708288851b808d2d03ea6046ca51bc44c228aaea12c9643a0a481ee41d8c')
version('4.7.6', sha256='449035f89a12f189579ff83811424c71e4a39e335bcb8045145ad084b7bde2dc')
depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools@40:', type='build', when='@5.1.0:')
depends_on('python@3.5:', type=('build', 'run'))
+ depends_on('python@3.6:', type=('build', 'run'), when='@5.1.0:')
diff --git a/var/spack/repos/builtin/packages/py-mxfold2/package.py b/var/spack/repos/builtin/packages/py-mxfold2/package.py
new file mode 100644
index 0000000000..c6838a97dc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-mxfold2/package.py
@@ -0,0 +1,26 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyMxfold2(PythonPackage):
+ """MXfold2: RNA secondary structure prediction using deep
+ learning with thermodynamic integration"""
+
+ homepage = "https://github.com/keio-bioinformatics/mxfold2"
+ url = "https://github.com/keio-bioinformatics/mxfold2/releases/download/v0.1.1/mxfold2-0.1.1.tar.gz"
+
+ maintainers = ['dorton21']
+
+ version('0.1.1', sha256='9f39c6ff4138212d1ad2639005f5c05ffb4df0f7e22f5e7ad49466a05aa047e5')
+
+ depends_on('python@3.7:', type=('build', 'run'))
+ depends_on('py-torch@1.7:~valgrind', type=('build', 'run'))
+ depends_on('py-torchvision', type=('build', 'run'))
+ depends_on('py-wheel@0.35.1:0.36.0', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools-cpp', type='build')
+ depends_on('cmake', type='build')
diff --git a/var/spack/repos/builtin/packages/py-mypy/package.py b/var/spack/repos/builtin/packages/py-mypy/package.py
index f70b07204b..adcad33a2b 100644
--- a/var/spack/repos/builtin/packages/py-mypy/package.py
+++ b/var/spack/repos/builtin/packages/py-mypy/package.py
@@ -12,6 +12,7 @@ class PyMypy(PythonPackage):
homepage = "http://www.mypy-lang.org/"
pypi = "mypy/mypy-0.740.tar.gz"
+ version('0.920', sha256='a55438627f5f546192f13255a994d6d1cf2659df48adcf966132b4379fd9c86b')
version('0.910', sha256='704098302473cb31a218f1775a873b376b30b4c18229421e9e9dc8916fd16150')
version('0.900', sha256='65c78570329c54fb40f956f7645e2359af5da9d8c54baa44f461cdc7f4984108')
version('0.800', sha256='e0202e37756ed09daf4b0ba64ad2c245d357659e014c3f51d8cd0681ba66940a')
@@ -20,9 +21,15 @@ class PyMypy(PythonPackage):
variant('python2', default=False, description='Enable checking of python 2 type annotations')
+ depends_on('python@3.6:', when='@0.920:', type=('build', 'run'))
depends_on("python@3.5:", type=("build", "run"))
+ depends_on('py-setuptools@40.6.2:', when='@0.790:', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
- depends_on('py-typed-ast@1.4.0:1.4', type=('build', 'run'))
+ depends_on('py-wheel@0.30:', when='@0.790:', type='build')
+ depends_on('py-typed-ast@1.4.0:1', when='@0.920: ^python@:3.7', type=('build', 'run'))
+ depends_on('py-typed-ast@1.4.0:1.4', when='@0.900:0.910 ^python@:3.7', type=('build', 'run'))
+ depends_on('py-typed-ast@1.4.0:1.4', when='@:0.800', type=('build', 'run'))
depends_on('py-typing-extensions@3.7.4:', type=('build', 'run'))
depends_on('py-mypy-extensions@0.4.3:0.4', type=('build', 'run'))
- depends_on('py-toml', when='@0.900:', type=('build', 'run'))
+ depends_on('py-tomli@1.1:2', when='@0.920:', type=('build', 'run'))
+ depends_on('py-toml', when='@0.900:0.910', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-nbclient/package.py b/var/spack/repos/builtin/packages/py-nbclient/package.py
index d181f2ae4b..1d90940491 100644
--- a/var/spack/repos/builtin/packages/py-nbclient/package.py
+++ b/var/spack/repos/builtin/packages/py-nbclient/package.py
@@ -12,12 +12,15 @@ class PyNbclient(PythonPackage):
homepage = "https://jupyter.org/"
pypi = "nbclient/nbclient-0.5.0.tar.gz"
+ version('0.5.5', sha256='ed7d18431393750d29a64da432e0b7889274eb5a5056682be5691b1b1dc8f755')
version('0.5.0', sha256='8ad52d27ba144fca1402db014857e53c5a864a2f407be66ca9d74c3a56d6591d')
depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('python@3.6.1:', type=('build', 'run'), when='@0.5.5:')
depends_on('py-setuptools', type='build')
depends_on('py-traitlets@4.2:', type=('build', 'run'))
depends_on('py-jupyter-client@6.1.5:', type=('build', 'run'))
depends_on('py-nbformat@5.0:', type=('build', 'run'))
- depends_on('py-async-generator', type=('build', 'run'))
+ depends_on('py-async-generator', type=('build', 'run'), when='@0.5.0')
+ depends_on('py-async-generator', type=('build', 'run'), when='@0.5.5: ^python@:3.6')
depends_on('py-nest-asyncio', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-nbconvert/package.py b/var/spack/repos/builtin/packages/py-nbconvert/package.py
index 47dc2dabd3..6ebff12ec7 100644
--- a/var/spack/repos/builtin/packages/py-nbconvert/package.py
+++ b/var/spack/repos/builtin/packages/py-nbconvert/package.py
@@ -10,6 +10,8 @@ class PyNbconvert(PythonPackage):
homepage = "https://github.com/jupyter/nbconvert"
pypi = "nbconvert/nbconvert-6.0.1.tar.gz"
+ version('6.3.0', sha256='5e77d6203854944520105e38f2563a813a4a3708e8563aa598928a3b5ee1081a')
+ version('6.2.0', sha256='16ceecd0afaa8fd26c245fa32e2c52066c02f13aa73387fffafd84750baea863')
version('6.0.1', sha256='db94117fbac29153834447e31b30cda337d4450e46e0bdb1a36eafbbf4435156')
version('5.6.0', sha256='427a468ec26e7d68a529b95f578d5cbf018cb4c1f889e897681c2b6d11897695')
version('5.5.0', sha256='138381baa41d83584459b5cfecfc38c800ccf1f37d9ddd0bd440783346a4c39c')
@@ -22,6 +24,7 @@ class PyNbconvert(PythonPackage):
depends_on('python@2.7:2.8,3.3:', type=('build', 'run'))
depends_on('python@2.7:2.8,3.5:', type=('build', 'run'), when='@5:')
depends_on('python@3.6:', type=('build', 'run'), when='@6:')
+ depends_on('python@3.7:', type=('build', 'run'), when='@6.2.0:')
depends_on('py-setuptools', type=('build', 'run'), when='@5:')
depends_on('py-pycurl', type='build', when='^python@:2.7.8')
depends_on('py-mistune@0.8.1:1', type=('build', 'run'))
@@ -32,6 +35,7 @@ class PyNbconvert(PythonPackage):
depends_on('py-jupyterlab-pygments', type=('build', 'run'), when='@6:')
depends_on('py-traitlets', type=('build', 'run'))
depends_on('py-traitlets@4.2:', type=('build', 'run'), when='@5:')
+ depends_on('py-traitlets@5:', type=('build', 'run'), when='@6.2.0:')
depends_on('py-jupyter-core', type=('build', 'run'))
depends_on('py-nbformat', type=('build', 'run'))
depends_on('py-nbformat@4.4.0:', type=('build', 'run'), when='@5:')
diff --git a/var/spack/repos/builtin/packages/py-nbdime/package.py b/var/spack/repos/builtin/packages/py-nbdime/package.py
new file mode 100644
index 0000000000..e52d5aae7d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-nbdime/package.py
@@ -0,0 +1,30 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyNbdime(PythonPackage):
+ """Diff and merge of Jupyter Notebooks"""
+
+ homepage = "https://nbdime.readthedocs.io/"
+ pypi = "nbdime/nbdime-3.1.1.tar.gz"
+
+ version('3.1.1', sha256='67767320e971374f701a175aa59abd3a554723039d39fae908e72d16330d648b')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools@40.8.0:', type='build')
+ depends_on('py-nbformat', type=('build', 'run'))
+ depends_on('py-colorama', type=('build', 'run'))
+ depends_on('py-pygments', type=('build', 'run'))
+ depends_on('py-tornado', type=('build', 'run'))
+ depends_on('py-requests', type=('build', 'run'))
+ depends_on('py-gitpython@:2.1.3,2.1.7:', type=('build', 'run'))
+ depends_on('py-jupyter-server', type=('build', 'run'))
+ depends_on('py-jupyter-server-mathjax@0.2.2:', type=('build', 'run'))
+ depends_on('py-jinja2@2.9:', type=('build', 'run'))
+ # From pyproject.toml
+ depends_on('py-jupyterlab@3.0:3', type=('build', 'run'))
+ depends_on('py-wheel', type='build')
diff --git a/var/spack/repos/builtin/packages/py-nest-asyncio/package.py b/var/spack/repos/builtin/packages/py-nest-asyncio/package.py
index 74a8fc7fe3..62a54f2964 100644
--- a/var/spack/repos/builtin/packages/py-nest-asyncio/package.py
+++ b/var/spack/repos/builtin/packages/py-nest-asyncio/package.py
@@ -10,7 +10,9 @@ class PyNestAsyncio(PythonPackage):
homepage = "https://github.com/erdewit/nest_asyncio"
pypi = "nest-asyncio/nest_asyncio-1.4.0.tar.gz"
+ version('1.5.1', sha256='afc5a1c515210a23c461932765691ad39e8eba6551c055ac8d5546e69250d0aa')
version('1.4.0', sha256='5773054bbc14579b000236f85bc01ecced7ffd045ec8ca4a9809371ec65a59c8')
depends_on('python@3.5:', type=('build', 'run'))
- depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools@42:', type='build')
+ depends_on('py-setuptools-scm@3.4.3: +toml', type='build')
diff --git a/var/spack/repos/builtin/packages/py-networkx/package.py b/var/spack/repos/builtin/packages/py-networkx/package.py
index 1f5e13f1db..f44aecd3b5 100644
--- a/var/spack/repos/builtin/packages/py-networkx/package.py
+++ b/var/spack/repos/builtin/packages/py-networkx/package.py
@@ -13,6 +13,7 @@ class PyNetworkx(PythonPackage):
homepage = "https://networkx.github.io/"
pypi = "networkx/networkx-2.4.tar.gz"
+ version('2.6.3', sha256='c0946ed31d71f1b732b5aaa6da5a0388a345019af232ce2f49c766e2d6795c51')
version('2.5.1', sha256='109cd585cac41297f71103c3c42ac6ef7379f29788eb54cb751be5a663bb235a')
version('2.4', sha256='f8f4ff0b6f96e4f9b16af6b84622597b5334bf9cae8cf9b2e42e7985d5c95c64')
version('2.3', sha256='8311ddef63cf5c5c5e7c1d0212dd141d9a1fe3f474915281b73597ed5f1d4e3d')
@@ -25,11 +26,12 @@ class PyNetworkx(PythonPackage):
depends_on('python@2.7:', type=('build', 'run'))
depends_on('python@3.5:', type=('build', 'run'), when='@2.3:')
depends_on('python@3.6:', type=('build', 'run'), when='@2.5:')
+ depends_on('python@3.7:', type=('build', 'run'), when='@2.6:')
depends_on('py-setuptools', type='build')
- depends_on('py-decorator@3.4.0:', type=('build', 'run'))
- depends_on('py-decorator@4.1.0:', type=('build', 'run'), when='@2.0:')
- depends_on('py-decorator@4.3.0:', type=('build', 'run'), when='@2.2:')
- depends_on('py-decorator@4.3.0:4', type=('build', 'run'), when='@2.5.1:')
+ depends_on('py-decorator@3.4.0:', type=('build', 'run'), when='@:1')
+ depends_on('py-decorator@4.1.0:', type=('build', 'run'), when='@2.0:2.1')
+ depends_on('py-decorator@4.3.0:', type=('build', 'run'), when='@2.2:2.4')
+ depends_on('py-decorator@4.3.0:4', type=('build', 'run'), when='@2.5.1:2.5')
def url_for_version(self, version):
ext = 'tar.gz'
diff --git a/var/spack/repos/builtin/packages/py-neurokit2/package.py b/var/spack/repos/builtin/packages/py-neurokit2/package.py
index 857541e438..9cca27e907 100644
--- a/var/spack/repos/builtin/packages/py-neurokit2/package.py
+++ b/var/spack/repos/builtin/packages/py-neurokit2/package.py
@@ -19,6 +19,7 @@ class PyNeurokit2(PythonPackage):
homepage = "https://github.com/neuropsychology/NeuroKit"
pypi = "neurokit2/neurokit2-0.1.2.tar.gz"
+ version('0.1.5', sha256='4df48c0ce8971e32e32f36c2263986b00fd83da5eadaaa98e4bb5ab6bcd930e5')
version('0.1.4.1', sha256='226bb04bb369d8bb87d99831f0a93cd8d0ed96fdc500f63de0b3550082876f6e')
version('0.1.2', sha256='5ef40037c2d7078ecb713ab0b77b850267babf133856b59595de9613f29787bc')
diff --git a/var/spack/repos/builtin/packages/py-neurolab/package.py b/var/spack/repos/builtin/packages/py-neurolab/package.py
new file mode 100644
index 0000000000..f58bc2116a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-neurolab/package.py
@@ -0,0 +1,18 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyNeurolab(PythonPackage):
+ """Simple and powerful neural network library for python"""
+
+ homepage = "http://neurolab.googlecode.com/"
+ pypi = "neurolab/neurolab-0.3.5.tar.gz"
+
+ version('0.3.5', sha256='96ec311988383c63664f3325668f27c30561cf4349e3bc5420665c042a3b9191')
+
+ depends_on('py-numpy', type=('build', 'run'))
+ # depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-ninja/package.py b/var/spack/repos/builtin/packages/py-ninja/package.py
new file mode 100644
index 0000000000..944163676f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-ninja/package.py
@@ -0,0 +1,35 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import os
+
+
+class PyNinja(PythonPackage):
+ """Ninja is a small build system with a focus on speed."""
+
+ homepage = "https://ninja-build.org"
+ pypi = "ninja/ninja-1.10.2.tar.gz"
+
+ version('1.10.2', sha256='bb5e54b9a7343b3a8fc6532ae2c169af387a45b0d4dd5b72c2803e21658c5791')
+
+ depends_on('cmake@3.6:', type='build')
+ depends_on('py-scikit-build', type='build')
+ depends_on('ninja@1.10.2', type=('build', 'run'), when='@1.10.2')
+
+ def patch(self):
+ os.unlink(join_path(self.stage.source_path, 'CMakeLists.txt'))
+
+ @run_after('install')
+ def installit(self):
+ syntax_file = os.path.join(self.spec['ninja'].prefix.misc,
+ 'ninja_syntax.py')
+ bin_file = os.path.join(self.spec['ninja'].prefix.bin,
+ 'ninja')
+ dst = os.path.join(site_packages_dir,
+ 'ninja')
+ dstbin = os.path.join(dst, 'data', 'bin')
+ mkdirp(dstbin)
+ os.symlink(bin_file, os.path.join(dstbin, 'ninja'))
+ os.symlink(syntax_file, os.path.join(dst, 'ninja_syntax.py'))
diff --git a/var/spack/repos/builtin/packages/py-nose/package.py b/var/spack/repos/builtin/packages/py-nose/package.py
index 542931d39c..9a50b115f1 100644
--- a/var/spack/repos/builtin/packages/py-nose/package.py
+++ b/var/spack/repos/builtin/packages/py-nose/package.py
@@ -16,4 +16,4 @@ class PyNose(PythonPackage):
version('1.3.6', sha256='f61e0909a743eed37b1207e38a8e7b4a2fe0a82185e36f2be252ef1b3f901758')
version('1.3.4', sha256='76bc63a4e2d5e5a0df77ca7d18f0f56e2c46cfb62b71103ba92a92c79fab1e03')
- depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools@:57', type='build')
diff --git a/var/spack/repos/builtin/packages/py-notebook/package.py b/var/spack/repos/builtin/packages/py-notebook/package.py
index 1cfbfa496f..9ad9eb950b 100644
--- a/var/spack/repos/builtin/packages/py-notebook/package.py
+++ b/var/spack/repos/builtin/packages/py-notebook/package.py
@@ -10,6 +10,7 @@ class PyNotebook(PythonPackage):
homepage = "https://github.com/jupyter/notebook"
pypi = "notebook/notebook-6.1.4.tar.gz"
+ version('6.4.5', sha256='872e20da9ae518bbcac3e4e0092d5bd35454e847dedb8cb9739e9f3b68406be0')
version('6.1.4', sha256='687d01f963ea20360c0b904ee7a37c3d8cda553858c8d6e33fd0afd13e89de32')
version('6.0.3', sha256='47a9092975c9e7965ada00b9a20f0cf637d001db60d241d479f53c0be117ad48')
version('6.0.1', sha256='660976fe4fe45c7aa55e04bf4bccb9f9566749ff637e9020af3422f9921f9a5d')
@@ -33,11 +34,13 @@ class PyNotebook(PythonPackage):
depends_on('python@2.7:2.8,3.3:', type=('build', 'run'))
depends_on('python@3.5:', type=('build', 'run'), when='@6:')
+ depends_on('python@3.6:', type=('build', 'run'), when='@6.3:')
depends_on('py-setuptools', type=('build', 'run'), when='@5:')
depends_on('py-jinja2', type=('build', 'run'))
depends_on('py-tornado@4.0:6', type=('build', 'run'), when='@:5.7.4')
depends_on('py-tornado@4.1:6', type=('build', 'run'), when='@5.7.5:5')
depends_on('py-tornado@5.0:', type=('build', 'run'), when='@6:')
+ depends_on('py-tornado@6.1:', type=('build', 'run'), when='@6.4.5:')
depends_on('py-pyzmq@17:', type=('build', 'run'), when='@6:')
depends_on('py-argon2-cffi', type=('build', 'run'), when='@6.1:')
depends_on('py-ipython-genutils', type=('build', 'run'))
@@ -55,6 +58,7 @@ class PyNotebook(PythonPackage):
depends_on('py-nbconvert', type=('build', 'run'))
depends_on('py-ipykernel', type=('build', 'run'))
depends_on('py-send2trash', type=('build', 'run'), when='@6:')
+ depends_on('py-send2trash@1.5.0:', type=('build', 'run'), when='@6.2.0:')
depends_on('py-terminado@0.3.3:', type=('build', 'run'), when='@:5.7.0')
depends_on('py-terminado@0.8.1:', type=('build', 'run'), when='@5.7.0:')
depends_on('py-terminado@0.8.3:', type=('build', 'run'), when='@6.1:')
diff --git a/var/spack/repos/builtin/packages/py-ntplib/package.py b/var/spack/repos/builtin/packages/py-ntplib/package.py
index 5fe6aa25a0..5fe6aa25a0 100755..100644
--- a/var/spack/repos/builtin/packages/py-ntplib/package.py
+++ b/var/spack/repos/builtin/packages/py-ntplib/package.py
diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py
index a808922850..b4f71ebcaa 100644
--- a/var/spack/repos/builtin/packages/py-numpy/package.py
+++ b/var/spack/repos/builtin/packages/py-numpy/package.py
@@ -23,7 +23,8 @@ class PyNumpy(PythonPackage):
maintainers = ['adamjstewart']
version('main', branch='main')
- version('master', branch='main', deprecated=True)
+ version('1.21.5', sha256='6a5928bc6241264dce5ed509e66f33676fc97f464e7a919edc672fb5532221ee')
+ version('1.21.4', sha256='e6c76a87633aa3fa16614b61ccedfae45b91df2767cf097aa9c933932a7ed1e0')
version('1.21.3', sha256='63571bb7897a584ca3249c86dd01c10bcb5fe4296e3568b2e9c1a55356b6410e')
version('1.21.2', sha256='423216d8afc5923b15df86037c6053bf030d15cc9e3224206ef868c2d63dd6dc')
version('1.21.1', sha256='dff4af63638afcc57a3dfb9e4b26d434a7a602d225b42d746ea7fe2edf1342fd')
@@ -95,10 +96,10 @@ class PyNumpy(PythonPackage):
depends_on('python@3.7:3.10', type=('build', 'link', 'run'), when='@1.21.2:')
depends_on('py-setuptools', type=('build', 'run'))
# Check pyproject.toml for updates to the required cython version
- depends_on('py-cython@0.29.13:', when='@1.18.0:', type='build')
- depends_on('py-cython@0.29.14:', when='@1.18.1:', type='build')
- depends_on('py-cython@0.29.21:', when='@1.19.1:', type='build')
- depends_on('py-cython@0.29.24:', when='@1.21.2:', type='build')
+ depends_on('py-cython@0.29.13:2', when='@1.18.0:', type='build')
+ depends_on('py-cython@0.29.14:2', when='@1.18.1:', type='build')
+ depends_on('py-cython@0.29.21:2', when='@1.19.1:', type='build')
+ depends_on('py-cython@0.29.24:2', when='@1.21.2:', type='build')
depends_on('blas', when='+blas')
depends_on('lapack', when='+lapack')
@@ -207,7 +208,9 @@ class PyNumpy(PythonPackage):
# Tell numpy where to find BLAS/LAPACK libraries
with open('site.cfg', 'w') as f:
- if '^intel-mkl' in spec or '^intel-parallel-studio+mkl' in spec:
+ if '^intel-mkl' in spec or \
+ '^intel-parallel-studio+mkl' in spec or \
+ '^intel-oneapi-mkl' in spec:
f.write('[mkl]\n')
# FIXME: as of @1.11.2, numpy does not work with separately
# specified threading and interface layers. A workaround is a
@@ -300,7 +303,8 @@ class PyNumpy(PythonPackage):
if 'blas' not in spec:
blas = ''
elif spec['blas'].name == 'intel-mkl' or \
- spec['blas'].name == 'intel-parallel-studio':
+ spec['blas'].name == 'intel-parallel-studio' or \
+ spec['blas'].name == 'intel-oneapi-mkl':
blas = 'mkl'
elif spec['blas'].name == 'blis':
blas = 'blis'
@@ -319,7 +323,8 @@ class PyNumpy(PythonPackage):
if 'lapack' not in spec:
lapack = ''
elif spec['lapack'].name == 'intel-mkl' or \
- spec['lapack'].name == 'intel-parallel-studio':
+ spec['lapack'].name == 'intel-parallel-studio' or \
+ spec['lapack'].name == 'intel-oneapi-mkl':
lapack = 'mkl'
elif spec['lapack'].name == 'openblas':
lapack = 'openblas'
diff --git a/var/spack/repos/builtin/packages/py-oauthlib/package.py b/var/spack/repos/builtin/packages/py-oauthlib/package.py
index be59718198..9fe6798ae0 100644
--- a/var/spack/repos/builtin/packages/py-oauthlib/package.py
+++ b/var/spack/repos/builtin/packages/py-oauthlib/package.py
@@ -15,14 +15,19 @@ class PyOauthlib(PythonPackage):
homepage = "https://github.com/oauthlib/oauthlib"
pypi = "oauthlib/oauthlib-3.1.0.tar.gz"
+ version('3.1.1', sha256='8f0215fcc533dd8dd1bee6f4c412d4f0cd7297307d43ac61666389e3bc3198a3')
version('3.1.0', sha256='bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889')
version('3.0.1', sha256='0ce32c5d989a1827e3f1148f98b9085ed2370fc939bf524c9c851d8714797298')
version('2.0.2', sha256='b3b9b47f2a263fe249b5b48c4e25a5bce882ff20a0ac34d553ce43cff55b53ac')
variant('extras', default=True, description='Build with pyjwt, blinker, cryptography')
- depends_on('py-setuptools', type='build')
- depends_on('py-pyjwt@1.0.0:', type=('build', 'run'), when='+extras')
- depends_on('py-blinker', type=('build', 'run'), when='+extras')
- depends_on('py-cryptography', type=('build', 'run'), when='+extras')
- depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-pyjwt@1.0.0:', type=('build', 'run'), when='+extras')
+ depends_on('py-pyjwt@2.0.0:2', type=('build', 'run'), when='+extras @3.1.1:')
+ depends_on('py-blinker', type=('build', 'run'), when='+extras')
+ depends_on('py-blinker@1.4:', type=('build', 'run'), when='+extras @3.1.1:')
+ depends_on('py-cryptography', type=('build', 'run'), when='+extras')
+ depends_on('py-cryptography@3.0.0:3', type=('build', 'run'), when='+extras @3.1.1:')
+ depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
+ depends_on('python@3.6:', type=('build', 'run'), when='@3.1.1:')
diff --git a/var/spack/repos/builtin/packages/py-onnx-runtime/cms.patch b/var/spack/repos/builtin/packages/py-onnx-runtime/cms.patch
new file mode 100644
index 0000000000..ebe626c4d7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-onnx-runtime/cms.patch
@@ -0,0 +1,146 @@
+diff -ur a/cmake/CMakeLists.txt b/cmake/CMakeLists.txt
+--- a/cmake/CMakeLists.txt 2021-12-14 22:34:21.754062247 +0100
++++ b/cmake/CMakeLists.txt 2021-12-14 22:36:18.094061909 +0100
+@@ -240,7 +240,7 @@
+ if (onnxruntime_MINIMAL_BUILD_CUSTOM_OPS)
+ add_compile_definitions(ORT_MINIMAL_BUILD_CUSTOM_OPS)
+ endif()
+-
++
+ set(onnxruntime_REDUCED_OPS_BUILD ON)
+
+ if (NOT onnxruntime_ENABLE_PYTHON)
+@@ -568,7 +568,7 @@
+ endif()
+ endif()
+
+-if(NOT WIN32 AND NOT onnxruntime_PREFER_SYSTEM_LIB)
++if(NOT WIN32)
+ add_subdirectory(${PROJECT_SOURCE_DIR}/external/nsync EXCLUDE_FROM_ALL)
+ endif()
+ # External dependencies
+@@ -596,7 +596,7 @@
+ if(NOT Protobuf_USE_STATIC_LIBS)
+ #Indeed here should be a warning, not a fatal error. ONNX Runtime itself can work in such a
+ #setting but it may cause compatibility issue when ONNX Runtime is integrated with the other ONNX ecosystem softwares.
+- message(FATAL_ERROR "Please enable Protobuf_USE_STATIC_LIBS")
++ message(WARNING "Please enable Protobuf_USE_STATIC_LIBS")
+ endif()
+ else()
+ set(PROTOBUF_LIB protobuf::libprotobuf-lite)
+diff -ur a/include/onnxruntime/core/platform/ort_mutex.h b/include/onnxruntime/core/platform/ort_mutex.h
+--- a/include/onnxruntime/core/platform/ort_mutex.h 2021-12-14 22:34:21.784062247 +0100
++++ b/include/onnxruntime/core/platform/ort_mutex.h 2021-12-14 22:36:18.164061909 +0100
+@@ -101,7 +101,7 @@
+ return steady_clock::now() - steady_now < rel_time ? std::cv_status::no_timeout : std::cv_status::timeout;
+ }
+ } // namespace onnxruntime
+-#else
++#elif !defined(__aarch64__)
+ #include "nsync.h"
+ #include <mutex> //for unique_lock
+ #include <condition_variable> //for cv_status
+@@ -186,4 +186,11 @@
+ return steady_clock::now() - steady_now < rel_time ? std::cv_status::no_timeout : std::cv_status::timeout;
+ }
+ }; // namespace onnxruntime
++#else
++#include <mutex>
++#include <condition_variable>
++namespace onnxruntime {
++using OrtMutex = std::mutex;
++using OrtCondVar = std::condition_variable;
++} // namespace onnxruntime
+ #endif
+diff -ur a/include/onnxruntime/core/session/onnxruntime_cxx_api.h b/include/onnxruntime/core/session/onnxruntime_cxx_api.h
+--- a/include/onnxruntime/core/session/onnxruntime_cxx_api.h 2021-12-14 22:34:21.784062247 +0100
++++ b/include/onnxruntime/core/session/onnxruntime_cxx_api.h 2021-12-14 22:36:18.164061909 +0100
+@@ -345,8 +345,8 @@
+
+ struct Session : Base<OrtSession> {
+ explicit Session(std::nullptr_t) {}
+- Session(Env& env, const ORTCHAR_T* model_path, const SessionOptions& options);
+- Session(Env& env, const void* model_data, size_t model_data_length, const SessionOptions& options);
++ Session(const Env& env, const ORTCHAR_T* model_path, const SessionOptions& options);
++ Session(const Env& env, const void* model_data, size_t model_data_length, const SessionOptions& options);
+
+ // Run that will allocate the output values
+ std::vector<Value> Run(const RunOptions& run_options, const char* const* input_names, const Value* input_values, size_t input_count,
+diff -ur a/include/onnxruntime/core/session/onnxruntime_cxx_inline.h b/include/onnxruntime/core/session/onnxruntime_cxx_inline.h
+--- a/include/onnxruntime/core/session/onnxruntime_cxx_inline.h 2021-12-14 22:34:21.784062247 +0100
++++ b/include/onnxruntime/core/session/onnxruntime_cxx_inline.h 2021-12-14 22:36:18.164061909 +0100
+@@ -500,11 +500,11 @@
+ return *this;
+ }
+
+-inline Session::Session(Env& env, const ORTCHAR_T* model_path, const SessionOptions& options) {
++inline Session::Session(const Env& env, const ORTCHAR_T* model_path, const SessionOptions& options) {
+ ThrowOnError(GetApi().CreateSession(env, model_path, options, &p_));
+ }
+
+-inline Session::Session(Env& env, const void* model_data, size_t model_data_length, const SessionOptions& options) {
++inline Session::Session(const Env& env, const void* model_data, size_t model_data_length, const SessionOptions& options) {
+ ThrowOnError(GetApi().CreateSessionFromArray(env, model_data, model_data_length, options, &p_));
+ }
+
+diff -ur a/onnxruntime/core/mlas/lib/platform.cpp b/onnxruntime/core/mlas/lib/platform.cpp
+--- a/onnxruntime/core/mlas/lib/platform.cpp 2021-12-14 22:34:21.864062247 +0100
++++ b/onnxruntime/core/mlas/lib/platform.cpp 2021-12-14 22:36:18.244061908 +0100
+@@ -16,6 +16,7 @@
+ --*/
+
+ #include "mlasi.h"
++#include <string>
+
+ //
+ // Stores the platform information.
+@@ -170,8 +171,11 @@
+ //
+
+ uint64_t xcr0 = MlasReadExtendedControlRegister(_XCR_XFEATURE_ENABLED_MASK);
++ const char *cpu_opt = std::getenv("MLAS_DYNAMIC_CPU_ARCH");
++ if (cpu_opt == nullptr) cpu_opt = "99";
++ auto opt = std::stoi(cpu_opt);
+
+- if ((xcr0 & 0x6) == 0x6) {
++ if (opt > 0 && (xcr0 & 0x6) == 0x6) {
+
+ this->GemmFloatKernel = MlasGemmFloatKernelAvx;
+
+@@ -204,7 +208,7 @@
+ __cpuid_count(7, 0, Cpuid7[0], Cpuid7[1], Cpuid7[2], Cpuid7[3]);
+ #endif
+
+- if (((Cpuid1[2] & 0x1000) != 0) && ((Cpuid7[1] & 0x20) != 0)) {
++ if (opt > 1 && ((Cpuid1[2] & 0x1000) != 0) && ((Cpuid7[1] & 0x20) != 0)) {
+
+ this->GemmU8S8Operation = MlasGemmU8X8Operation<MLAS_GEMM_U8S8_KERNEL_AVX2>;
+ this->GemmU8S8PackedOperation = MlasGemmU8X8PackedOperation<MLAS_GEMM_U8S8_KERNEL_AVX2>;
+@@ -264,7 +268,7 @@
+ // operating system supports saving AVX512F state.
+ //
+
+- if (((Cpuid7[1] & 0x10000) != 0) && ((xcr0 & 0xE0) == 0xE0)) {
++ if (opt > 2 && ((Cpuid7[1] & 0x10000) != 0) && ((xcr0 & 0xE0) == 0xE0)) {
+
+ this->GemmFloatKernel = MlasGemmFloatKernelAvx512F;
+ this->GemmDoubleKernel = MlasGemmDoubleKernelAvx512F;
+diff -ur a/onnxruntime/core/platform/posix/ort_mutex.cc b/onnxruntime/core/platform/posix/ort_mutex.cc
+--- a/onnxruntime/core/platform/posix/ort_mutex.cc 2021-12-14 22:34:21.874062247 +0100
++++ b/onnxruntime/core/platform/posix/ort_mutex.cc 2021-12-14 22:36:18.254061908 +0100
+@@ -1,6 +1,7 @@
+ // Copyright (c) Microsoft Corporation. All rights reserved.
+ // Licensed under the MIT License.
+
++#if !defined(__aarch64__)
+ #include "core/common/common.h"
+ #include "core/platform/ort_mutex.h"
+ #include <assert.h>
+@@ -40,4 +41,5 @@
+ nsync::nsync_cv_wait(&native_cv_object, lk.mutex()->native_handle());
+ }
+
+-} // namespace onnxruntime
+\ No newline at end of file
++} // namespace onnxruntime
++#endif
diff --git a/var/spack/repos/builtin/packages/py-onnx-runtime/gcc11.patch b/var/spack/repos/builtin/packages/py-onnx-runtime/gcc11.patch
new file mode 100644
index 0000000000..b53cc1dc6d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-onnx-runtime/gcc11.patch
@@ -0,0 +1,36 @@
+From de4089f8cbe0baffe56a363cc3a41595cc8f0809 Mon Sep 17 00:00:00 2001
+From: ankurverma85 <31362771+ankurverma85@users.noreply.github.com>
+Date: Mon, 10 May 2021 12:50:08 -0700
+Subject: [PATCH] GCC11/Libstdc++11 Compilation fixes (#7599)
+
+Authored-by: Ankur Verma <ankurv@microsoft.com>
+---
+ include/onnxruntime/core/graph/graph_nodes.h | 2 +-
+ onnxruntime/test/providers/cpu/controlflow/loop_test.cc | 1 +
+ 2 files changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/include/onnxruntime/core/graph/graph_nodes.h b/include/onnxruntime/core/graph/graph_nodes.h
+index 422fe9538ea..aec603f7942 100644
+--- a/include/onnxruntime/core/graph/graph_nodes.h
++++ b/include/onnxruntime/core/graph/graph_nodes.h
+@@ -100,7 +100,7 @@ class ValidNodes {
+ using const_reference = const T&;
+
+ /** Construct a NodeInterator and move to the first valid node. */
+- NodeIterator<TIterator>(const TIterator current, const TIterator end, const NodeFilterFunc& filter_fn) noexcept
++ NodeIterator(const TIterator current, const TIterator end, const NodeFilterFunc& filter_fn) noexcept
+ : current_{current}, end_{end}, apply_filter_{filter_fn != nullptr}, filter_func_{&filter_fn} {
+ // skip to next valid node, stopping at end if none are found
+ while (current_ < end && (*current_ == nullptr ||
+diff --git a/onnxruntime/test/providers/cpu/controlflow/loop_test.cc b/onnxruntime/test/providers/cpu/controlflow/loop_test.cc
+index b058e9a16c7..3cf147e997c 100644
+--- a/onnxruntime/test/providers/cpu/controlflow/loop_test.cc
++++ b/onnxruntime/test/providers/cpu/controlflow/loop_test.cc
+@@ -2,6 +2,7 @@
+ // Licensed under the MIT License.
+
+ #include <future>
++#include <thread>
+ #include "gtest/gtest.h"
+ #include "gmock/gmock.h"
+
diff --git a/var/spack/repos/builtin/packages/py-onnx-runtime/libiconv.patch b/var/spack/repos/builtin/packages/py-onnx-runtime/libiconv.patch
new file mode 100644
index 0000000000..b782c15ed1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-onnx-runtime/libiconv.patch
@@ -0,0 +1,42 @@
+--- cmake/onnxruntime.cmake.orig 2021-08-06 12:36:32.720081500 +0200
++++ cmake/onnxruntime.cmake 2021-08-13 18:53:30.310868700 +0200
+@@ -120,7 +120,8 @@
+ onnxruntime_common
+ onnxruntime_mlas
+ onnxruntime_flatbuffers
+- ${onnxruntime_EXTERNAL_LIBRARIES})
++ ${onnxruntime_EXTERNAL_LIBRARIES}
++ -liconv)
+
+ if (onnxruntime_ENABLE_LANGUAGE_INTEROP_OPS)
+ target_link_libraries(onnxruntime PRIVATE onnxruntime_language_interop onnxruntime_pyop)
+--- cmake/onnxruntime_python.cmake.orig 2021-08-06 12:36:32.725148600 +0200
++++ cmake/onnxruntime_python.cmake 2021-08-13 18:54:37.085622000 +0200
+@@ -106,6 +106,7 @@
+ onnxruntime_mlas
+ onnxruntime_flatbuffers
+ ${pybind11_lib}
++ -liconv
+ )
+
+ if (onnxruntime_ENABLE_LANGUAGE_INTEROP_OPS)
+--- cmake/onnxruntime_unittests.cmake.orig 2021-08-13 19:11:58.645461300 +0200
++++ cmake/onnxruntime_unittests.cmake 2021-08-13 19:14:18.373814800 +0200
+@@ -603,7 +603,7 @@
+ AddTest(
+ TARGET onnxruntime_test_all
+ SOURCES ${all_tests} ${onnxruntime_unittest_main_src}
+- LIBS onnx_test_runner_common ${onnxruntime_test_providers_libs} ${onnxruntime_test_common_libs} re2::re2 onnx_test_data_proto
++ LIBS onnx_test_runner_common ${onnxruntime_test_providers_libs} ${onnxruntime_test_common_libs} re2::re2 onnx_test_data_proto -liconv
+ DEPENDS ${all_dependencies}
+ )
+
+@@ -723,7 +723,7 @@
+ target_compile_options(onnx_test_runner PRIVATE "$<$<COMPILE_LANGUAGE:CUDA>:SHELL:--compiler-options /utf-8>"
+ "$<$<NOT:$<COMPILE_LANGUAGE:CUDA>>:/utf-8>")
+ endif()
+-target_link_libraries(onnx_test_runner PRIVATE onnx_test_runner_common ${GETOPT_LIB_WIDE} ${onnx_test_libs})
++target_link_libraries(onnx_test_runner PRIVATE onnx_test_runner_common ${GETOPT_LIB_WIDE} ${onnx_test_libs} -liconv)
+ target_include_directories(onnx_test_runner PRIVATE ${ONNXRUNTIME_ROOT})
+ set_target_properties(onnx_test_runner PROPERTIES FOLDER "ONNXRuntimeTest")
+
diff --git a/var/spack/repos/builtin/packages/py-onnx-runtime/package.py b/var/spack/repos/builtin/packages/py-onnx-runtime/package.py
new file mode 100644
index 0000000000..2bb30786a8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-onnx-runtime/package.py
@@ -0,0 +1,121 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyOnnxRuntime(CMakePackage, PythonPackage):
+ """ONNX Runtime is a performance-focused complete scoring
+ engine for Open Neural Network Exchange (ONNX) models, with
+ an open extensible architecture to continually address the
+ latest developments in AI and Deep Learning. ONNX Runtime
+ stays up to date with the ONNX standard with complete
+ implementation of all ONNX operators, and supports all
+ ONNX releases (1.2+) with both future and backwards
+ compatibility."""
+
+ homepage = "https://github.com/microsoft/onnxruntime"
+ git = "https://github.com/microsoft/onnxruntime.git"
+
+ version('1.7.2', tag='v1.7.2', submodules=True)
+
+ variant('cuda', default=False, description='Build with CUDA support')
+
+ depends_on('cmake@3.1:', type='build')
+ depends_on('ninja', type='build')
+ depends_on('python', type=('build', 'run'))
+ depends_on('protobuf')
+ depends_on('py-protobuf', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy@1.16.6:', type=('build', 'run'))
+ depends_on('py-wheel', type='build')
+ depends_on('py-onnx', type=('build', 'run'))
+ depends_on('zlib')
+ depends_on('libpng')
+ depends_on('py-pybind11', type='build')
+ depends_on('cuda', when='+cuda')
+ depends_on('cudnn', when='+cuda')
+ depends_on('iconv', type=('build', 'link', 'run'))
+ depends_on('re2+shared')
+
+ extends('python')
+ # Adopted from CMS experiment's fork of onnxruntime
+ # https://github.com/cms-externals/onnxruntime/compare/5bc92df...d594f80
+ patch('cms.patch', level=1, when='@1.7.2')
+ # https://github.com/microsoft/onnxruntime/issues/4234#issuecomment-698077636
+ patch('libiconv.patch', level=0, when='@1.7.2')
+ # https://github.com/microsoft/onnxruntime/commit/de4089f8cbe0baffe56a363cc3a41595cc8f0809.patch
+ patch('gcc11.patch', level=1, when='@1.7.2')
+
+ dynamic_cpu_arch_values = ('NOAVX', 'AVX', 'AVX2', 'AVX512')
+
+ variant('dynamic_cpu_arch', default='AVX512',
+ values=dynamic_cpu_arch_values, multi=False,
+ description='AVX support level')
+
+ generator = 'Ninja'
+ root_cmakelists_dir = 'cmake'
+
+ def setup_build_environment(self, env):
+ value = self.spec.variants['dynamic_cpu_arch'].value
+ value = self.dynamic_cpu_arch_values.index(value)
+ env.set('MLAS_DYNAMIC_CPU_ARCH', str(value))
+
+ def setup_run_environment(self, env):
+ value = self.spec.variants['dynamic_cpu_arch'].value
+ value = self.dynamic_cpu_arch_values.index(value)
+ env.set('MLAS_DYNAMIC_CPU_ARCH', str(value))
+
+ def cmake_args(self):
+ define = self.define
+ define_from_variant = self.define_from_variant
+
+ args = [define('onnxruntime_ENABLE_PYTHON', True),
+ define('onnxruntime_BUILD_SHARED_LIB', True),
+ define_from_variant('onnxruntime_USE_CUDA', 'cuda'),
+ define('onnxruntime_BUILD_CSHARP', False),
+ define('onnxruntime_USE_EIGEN_FOR_BLAS', True),
+ define('onnxruntime_USE_OPENBLAS', False),
+ define("onnxruntime_USE_MKLML", False),
+ define("onnxruntime_USE_NGRAPH", False),
+ define("onnxruntime_USE_OPENMP", False),
+ define("onnxruntime_USE_TVM", False),
+ define("onnxruntime_USE_LLVM", False),
+ define("onnxruntime_ENABLE_MICROSOFT_INTERNAL", False),
+ define("onnxruntime_USE_BRAINSLICE", False),
+ define("onnxruntime_USE_NUPHAR", False),
+ define("onnxruntime_USE_TENSORRT", False),
+ define("onnxruntime_CROSS_COMPILING", False),
+ define("onnxruntime_USE_FULL_PROTOBUF", True),
+ define("onnxruntime_DISABLE_CONTRIB_OPS", False),
+ define("onnxruntime_USE_PREINSTALLED_PROTOBUF", True),
+ define("onnxruntime_PREFER_SYSTEM_LIB", True)]
+
+ if self.spec.satisfies('+cuda'):
+ args.extend((
+ define('onnxruntime_CUDA_VERSION', str(self.spec['cuda'].version)),
+ define('onnxruntime_CUDA_HOME', self.spec['cuda'].prefix),
+ define('onnxruntime_CUDNN_HOME', self.spec['cudnn'].prefix),
+ define('CMAKE_CUDA_FLAGS', '-cudart shared'),
+ define('CMAKE_CUDA_RUNTIME_LIBRARY', 'Shared'),
+ define('CMAKE_TRY_COMPILE_PLATFORM_VARIABLES',
+ 'CMAKE_CUDA_RUNTIME_LIBRARY')
+ ))
+
+ return args
+
+ def setup_file(self):
+ return join_path(self.stage.source_path, 'setup.py')
+
+ @run_after('build')
+ def build_python(self):
+ """Build everything needed to install."""
+ with working_dir(self.stage.source_path):
+ PythonPackage.build(self, self.spec, self.prefix)
+
+ @run_after('install')
+ def install_python(self):
+ with working_dir(self.stage.source_path):
+ PythonPackage.install(self, self.spec, self.prefix)
diff --git a/var/spack/repos/builtin/packages/py-onnx/package.py b/var/spack/repos/builtin/packages/py-onnx/package.py
index 904b392a99..c5a500203f 100644
--- a/var/spack/repos/builtin/packages/py-onnx/package.py
+++ b/var/spack/repos/builtin/packages/py-onnx/package.py
@@ -23,13 +23,15 @@ class PyOnnx(PythonPackage):
version('1.5.0', sha256='1a584a4ef62a6db178c257fffb06a9d8e61b41c0a80bfd8bcd8a253d72c4b0b4')
depends_on('py-setuptools', type='build')
- depends_on('protobuf')
- depends_on('py-protobuf+cpp', type=('build', 'run'))
+ # Protobuf version limit is due to https://github.com/protocolbuffers/protobuf/pull/8794
+ depends_on('protobuf@:3.17')
+ depends_on('py-protobuf+cpp@:3.17', type=('build', 'run'))
depends_on('py-numpy', type=('build', 'run'))
depends_on('py-six', type=('build', 'run'))
depends_on('py-typing@3.6.4:', when='^python@:3.4', type=('build', 'run'))
- depends_on('py-typing-extensions@3.6.4:', type=('build', 'run'))
+ depends_on('py-typing-extensions@3.6.2.1:', type=('build', 'run'))
depends_on('cmake@3.1:', type='build')
+ depends_on('py-pytest-runner', type='build')
# 'python_out' does not recognize dllexport_decl.
patch('remove_dllexport_decl.patch', when='@:1.6.0')
diff --git a/var/spack/repos/builtin/packages/py-onnxconverter-common/package.py b/var/spack/repos/builtin/packages/py-onnxconverter-common/package.py
new file mode 100644
index 0000000000..168cebc737
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-onnxconverter-common/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyOnnxconverterCommon(PythonPackage):
+ """ONNX Converter and Optimization Tools"""
+
+ homepage = "https://github.com/microsoft/onnxconverter-common"
+ url = "https://github.com/microsoft/onnxconverter-common/archive/refs/tags/v1.9.0.tar.gz"
+
+ version('1.9.0', sha256='32315bcc844a8203092f3117a4a092ac6cf03d6a20145477e284f1172557d6f9')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-protobuf', type=('build', 'run'))
+ depends_on('py-onnx', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-onnxmltools/package.py b/var/spack/repos/builtin/packages/py-onnxmltools/package.py
new file mode 100644
index 0000000000..dacaef2822
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-onnxmltools/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyOnnxmltools(PythonPackage):
+ """Converts Machine Learning models to ONNX"""
+
+ homepage = "https://github.com/onnx/onnxmltools"
+ pypi = "onnxmltools/onnxmltools-1.10.0.tar.gz"
+
+ version('1.10.0', sha256='4eb4605f18ed66553fc17438ac8cf5406d66dcc624bedd76d8067e1b08e6c75d')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-onnx', type=('build', 'run'))
+ depends_on('py-skl2onnx', type=('build', 'run'))
+ depends_on('py-onnx-runtime', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-openpmd-validator/package.py b/var/spack/repos/builtin/packages/py-openpmd-validator/package.py
index 08bd31120b..f44e0f1174 100644
--- a/var/spack/repos/builtin/packages/py-openpmd-validator/package.py
+++ b/var/spack/repos/builtin/packages/py-openpmd-validator/package.py
@@ -12,9 +12,13 @@ class PyOpenpmdValidator(PythonPackage):
openPMD is an open standard for particle-mesh data files."""
homepage = "https://www.openPMD.org"
- url = "https://github.com/openPMD/openPMD-validator/archive/1.0.0.2.tar.gz"
+ url = "https://github.com/openPMD/openPMD-validator/archive/refs/tags/1.1.0.2.tar.gz"
+ git = "https://github.com/openPMD/openPMD-validator.git"
+
maintainers = ['ax3l']
+ version('1.1.0.2', sha256='b30be7957c2e1e7de67d81fad64492c3a1ecd25db231293d896da116a71ecca5')
+ version('1.1.0.1', sha256='93031f50ddeb747ebd6aabca249aa6bf0491d570de56746d7a98d6453427f191')
version('1.0.0.2', sha256='1b97452991feb0f0ac1ffb3c92b7f9743a86b0b5390dbbfb21160e04f0a35a95')
depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-openpmd-viewer/package.py b/var/spack/repos/builtin/packages/py-openpmd-viewer/package.py
new file mode 100644
index 0000000000..a348b8b4f7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-openpmd-viewer/package.py
@@ -0,0 +1,56 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyOpenpmdViewer(PythonPackage):
+ """Python visualization tools for openPMD files"""
+
+ homepage = "https://www.openPMD.org"
+ git = "https://github.com/openPMD/openPMD-viewer.git"
+ pypi = 'openPMD-viewer/openPMD-viewer-1.2.0.tar.gz'
+
+ maintainers = ['RemiLehe', 'ax3l']
+
+ version('1.2.0', sha256='a27f8ac522c4c76fd774095e156a8b280c9211128f50aa07f16ac70d8222384d')
+
+ variant('backend', default='h5py,openpmd-api', multi=True,
+ values=('h5py', 'openpmd-api'))
+ variant('jupyter', default=False,
+ description='Enable Jupyter Widget GUI')
+ variant('numba', default=False,
+ description='Enable accelerated depositions for histograms')
+ variant('plot', default=True,
+ description='Enable plotting support')
+ variant('tutorials', default=True,
+ description='Enable dependencies for tutorials')
+
+ depends_on('python@3.5:', type=('build', 'run'))
+ depends_on('py-numpy@1.15:1', type=('build', 'run'))
+ depends_on('py-scipy', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-tqdm', type=('build', 'run'))
+
+ depends_on('py-h5py@2.8.0:', type=('build', 'run'))
+ with when('backend=openpmd-api'):
+ depends_on('openpmd-api +python -mpi', type=('build', 'run'))
+
+ with when('+jupyter'):
+ depends_on('py-ipywidgets', type=('build', 'run'))
+ depends_on('py-jupyter', type=('build', 'run'))
+ depends_on('py-tqdm +notebook', type=('build', 'run'))
+
+ with when('+numba'):
+ depends_on('py-numba', type=('build', 'run'))
+
+ with when('+plot'):
+ depends_on('py-matplotlib', type=('build', 'run'))
+ # missing in Spack:
+ # with when('+jupyter'):
+ # depends_on('py-ipympl', type=('build', 'run'))
+
+ with when('+tutorials'):
+ depends_on('py-wget', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-opentuner/package.py b/var/spack/repos/builtin/packages/py-opentuner/package.py
index 0e367bf55d..f4f054e8ec 100644
--- a/var/spack/repos/builtin/packages/py-opentuner/package.py
+++ b/var/spack/repos/builtin/packages/py-opentuner/package.py
@@ -14,6 +14,7 @@ class PyOpentuner(PythonPackage):
maintainers = ['matthiasdiener']
+ version('0.8.7', commit='070c5ce')
version('0.8.2', commit='8e720a2')
version('0.8.0', commit='4cb9135')
diff --git a/var/spack/repos/builtin/packages/py-packaging/package.py b/var/spack/repos/builtin/packages/py-packaging/package.py
index 37ad3aadca..c64cbf7edc 100644
--- a/var/spack/repos/builtin/packages/py-packaging/package.py
+++ b/var/spack/repos/builtin/packages/py-packaging/package.py
@@ -12,6 +12,7 @@ class PyPackaging(PythonPackage):
homepage = "https://github.com/pypa/packaging"
pypi = "packaging/packaging-19.2.tar.gz"
+ version('21.3', sha256='dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb')
version('21.0', sha256='7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7')
version('20.9', sha256='5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5')
version('19.2', sha256='28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47')
@@ -20,11 +21,13 @@ class PyPackaging(PythonPackage):
version('17.1', sha256='f019b770dd64e585a99714f1fd5e01c7a8f11b45635aa953fd41c689a657375b')
version('16.8', sha256='5d50835fdf0a7edf0b55e311b7c887786504efea1177abd7e69329a8e5ea619e')
- depends_on('python@3.4:', when='@21:', type=('build', 'run'))
- depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
+ depends_on('python@3.6:', when='@21:', type=('build', 'run'))
+ depends_on('python@2.7:2,3.4:', type=('build', 'run'))
depends_on('py-setuptools@40.8.0:', when='@20.8:', type='build')
depends_on('py-setuptools', type='build')
+ depends_on('py-pyparsing@2.0.2:3.0.4,3.0.6:', when='@21.3:', type=('build', 'run'))
+ depends_on('py-pyparsing@2.0.2:2', when='@21.1:21.2', type=('build', 'run'))
depends_on('py-pyparsing@2.0.2:', type=('build', 'run'))
- depends_on('py-six', type=('build', 'run'), when='@:20.7')
- depends_on('py-attrs', type=('build', 'run'), when='@19.1')
+ depends_on('py-six', when='@:20.7', type=('build', 'run'))
+ depends_on('py-attrs', when='@19.1', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py
index 9496908dd4..f592db859b 100644
--- a/var/spack/repos/builtin/packages/py-pandas/package.py
+++ b/var/spack/repos/builtin/packages/py-pandas/package.py
@@ -14,6 +14,7 @@ class PyPandas(PythonPackage):
maintainers = ['adamjstewart']
+ version('1.3.5', sha256='1e4285f5de1012de20ca46b188ccf33521bff61ba5c5ebd78b4fb28e5416a9f1')
version('1.3.4', sha256='a2aa18d3f0b7d538e21932f637fbfe8518d085238b429e4790a35e1e44a96ffc')
version('1.3.3', sha256='272c8cb14aa9793eada6b1ebe81994616e647b5892a370c7135efb2924b701df')
version('1.3.2', sha256='cbcb84d63867af3411fa063af3de64902665bb5b3d40b25b2059e40603594e87')
@@ -64,6 +65,7 @@ class PyPandas(PythonPackage):
depends_on('py-cython@0.29.24:2', type='build', when='@1.3.4:')
depends_on('py-setuptools@24.2.0:', type='build')
depends_on('py-setuptools@38.6.0:', type='build', when='@1.3:')
+ depends_on('py-setuptools@51.0.0:', type='build', when='@1.3.2:')
depends_on('py-numpy', type=('build', 'run'))
# 'NUMPY_IMPORT_ARRAY_RETVAL' was removed in numpy@0.19
depends_on('py-numpy@:1.18', type=('build', 'run'), when='@:0.25')
@@ -91,7 +93,7 @@ class PyPandas(PythonPackage):
@property
def import_modules(self):
- modules = super(PyPandas, self).import_modules()
+ modules = super(PyPandas, self).import_modules
ignored_imports = ["pandas.tests", "pandas.plotting._matplotlib"]
diff --git a/var/spack/repos/builtin/packages/py-pandocfilters/package.py b/var/spack/repos/builtin/packages/py-pandocfilters/package.py
index 719ac3163f..5f200dfa3f 100644
--- a/var/spack/repos/builtin/packages/py-pandocfilters/package.py
+++ b/var/spack/repos/builtin/packages/py-pandocfilters/package.py
@@ -12,4 +12,10 @@ class PyPandocfilters(PythonPackage):
homepage = "https://github.com/jgm/pandocfilters"
pypi = "pandocfilters/pandocfilters-1.4.2.tar.gz"
+ version('1.5.0', sha256='0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38')
version('1.4.2', sha256='b3dd70e169bb5449e6bc6ff96aea89c5eea8c5f6ab5e207fc2f521a2cf4a0da9')
+
+ depends_on('python@2.7:2,3.4:', type=('build', 'run'), when='@1.5.0:')
+ # Notice: @:1.4 uses distutils and won't build if py-setuptools are present:
+ # error: option --single-version-externally-managed not recognized
+ depends_on('py-setuptools', type='build', when='@1.5.0:')
diff --git a/var/spack/repos/builtin/packages/py-parmed/package.py b/var/spack/repos/builtin/packages/py-parmed/package.py
new file mode 100644
index 0000000000..1eaad013b5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-parmed/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyParmed(PythonPackage):
+ """ParmEd is a general tool for aiding in investigations of
+ biomolecular systems using popular molecular simulation
+ packages, like Amber, CHARMM, and OpenMM written in
+ Python."""
+
+ homepage = "https://parmed.github.io/ParmEd/html/index.html"
+ pypi = "ParmEd/ParmEd-3.4.3.tar.gz"
+
+ version('3.4.3', sha256='90afb155e3ffe69230a002922b28968464126d4450059f0bd97ceca679c6627c')
+
+ depends_on('python@2.7:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-parsimonious/package.py b/var/spack/repos/builtin/packages/py-parsimonious/package.py
new file mode 100644
index 0000000000..c46d18de01
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-parsimonious/package.py
@@ -0,0 +1,18 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyParsimonious(PythonPackage):
+ """(Soon to be) the fastest pure-Python PEG parser"""
+
+ homepage = "https://github.com/erikrose/parsimonious"
+ pypi = "parsimonious/parsimonious-0.8.1.tar.gz"
+
+ version('0.8.1', sha256='3add338892d580e0cb3b1a39e4a1b427ff9f687858fdd61097053742391a9f6b')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-six@1.9.0:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-parso/package.py b/var/spack/repos/builtin/packages/py-parso/package.py
index 07364b0bbb..61518517fb 100644
--- a/var/spack/repos/builtin/packages/py-parso/package.py
+++ b/var/spack/repos/builtin/packages/py-parso/package.py
@@ -14,6 +14,7 @@ class PyParso(PythonPackage):
pypi = "parso/parso-0.6.1.tar.gz"
+ version('0.8.2', sha256='12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398')
version('0.8.1', sha256='8519430ad07087d4c997fda3a7918f7cfa27cb58972a8c89c2a0295a1c940e9e')
version('0.7.1', sha256='caba44724b994a8a5e086460bb212abc5a8bc46951bf4a9a1210745953622eb9')
version('0.6.1', sha256='56b2105a80e9c4df49de85e125feb6be69f49920e121406f15e7acde6c9dfc57')
diff --git a/var/spack/repos/builtin/packages/py-pathlib2/package.py b/var/spack/repos/builtin/packages/py-pathlib2/package.py
index c8a2427603..b4055f3c50 100644
--- a/var/spack/repos/builtin/packages/py-pathlib2/package.py
+++ b/var/spack/repos/builtin/packages/py-pathlib2/package.py
@@ -11,6 +11,7 @@ class PyPathlib2(PythonPackage):
pypi = "pathlib2/pathlib2-2.3.2.tar.gz"
+ version('2.3.6', sha256='7d8bcb5555003cdf4a8d2872c538faa3a0f5d20630cb360e518ca3b981795e5f')
version('2.3.3', sha256='25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742')
version('2.3.2', sha256='8eb170f8d0d61825e09a95b38be068299ddeda82f35e96c3301a8a5e7604cb83')
version('2.1.0', sha256='deb3a960c1d55868dfbcac98432358b92ba89d95029cddd4040db1f27405055c')
diff --git a/var/spack/repos/builtin/packages/py-pbr/package.py b/var/spack/repos/builtin/packages/py-pbr/package.py
index 09bb49d295..5acc9dc180 100644
--- a/var/spack/repos/builtin/packages/py-pbr/package.py
+++ b/var/spack/repos/builtin/packages/py-pbr/package.py
@@ -15,6 +15,7 @@ class PyPbr(PythonPackage):
# Skip 'pbr.tests' imports
import_modules = ['pbr', 'pbr.cmd', 'pbr.hooks']
+ version('5.7.0', sha256='4651ca1445e80f2781827305de3d76b3ce53195f2227762684eb08f17bc473b7')
version('5.4.3', sha256='2c8e420cd4ed4cec4e7999ee47409e876af575d4c35a45840d59e8b5f3155ab8')
version('5.2.1', sha256='93d2dc6ee0c9af4dbc70bc1251d0e545a9910ca8863774761f92716dece400b6')
version('3.1.1', sha256='05f61c71aaefc02d8e37c0a3eeb9815ff526ea28b3b76324769e6158d7f95be1')
diff --git a/var/spack/repos/builtin/packages/py-pep517/package.py b/var/spack/repos/builtin/packages/py-pep517/package.py
new file mode 100644
index 0000000000..8dcd03c1fa
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pep517/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPep517(PythonPackage):
+ """Wrappers to build Python packages using PEP 517 hooks."""
+
+ homepage = "https://github.com/pypa/pep517"
+ pypi = "pep517/pep517-0.12.0.tar.gz"
+
+ version('0.12.0', sha256='931378d93d11b298cf511dd634cf5ea4cb249a28ef84160b3247ee9afb4e8ab0')
+
+ depends_on('py-toml', when='^python@:3.5', type=('build', 'run'))
+ depends_on('py-importlib-metadata', when='^python@:3.7', type=('build', 'run'))
+ depends_on('py-zipp', when='^python@:3.7', type=('build', 'run'))
+ depends_on('py-tomli@1.1:', when='^python@3.6:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-petsc4py/package.py b/var/spack/repos/builtin/packages/py-petsc4py/package.py
index 6a27c71a12..86f75c30c1 100644
--- a/var/spack/repos/builtin/packages/py-petsc4py/package.py
+++ b/var/spack/repos/builtin/packages/py-petsc4py/package.py
@@ -17,6 +17,7 @@ class PyPetsc4py(PythonPackage):
maintainers = ['balay']
version('main', branch='main')
+ version('3.16.2', sha256='906634497ae9c59f2c97e12b935954e5ba95df2e764290c24fff6751b7510b04')
version('3.16.1', sha256='c218358217c436947f8fd61f247f73ac65fa29ea3489ad00bef5827b1436b95f')
version('3.16.0', sha256='4044accfdc2c80994e80e4e286478d1ba9ac358512d1b74c42e1327eadb0d802')
version('3.15.5', sha256='cdbc8a7485960c80565268ae851639f6c620663f245708263a349903dd07e5ae')
diff --git a/var/spack/repos/builtin/packages/py-pexpect/package.py b/var/spack/repos/builtin/packages/py-pexpect/package.py
index 431219bf4e..36b3ee8d78 100644
--- a/var/spack/repos/builtin/packages/py-pexpect/package.py
+++ b/var/spack/repos/builtin/packages/py-pexpect/package.py
@@ -10,6 +10,7 @@ class PyPexpect(PythonPackage):
"""Pexpect allows easy control of interactive console applications."""
pypi = "pexpect/pexpect-4.2.1.tar.gz"
+ version('4.8.0', sha256='fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c')
version('4.7.0', sha256='9e2c1fd0e6ee3a49b28f95d4b33bc389c89b20af6a1255906e90ff1262ce62eb')
version('4.6.0', sha256='2a8e88259839571d1251d278476f3eec5db26deb73a70be5ed5dc5435e418aba')
version('4.2.1', sha256='3d132465a75b57aa818341c6521392a06cc660feb3988d7f1074f39bd23c9a92')
diff --git a/var/spack/repos/builtin/packages/py-pickle5/package.py b/var/spack/repos/builtin/packages/py-pickle5/package.py
new file mode 100644
index 0000000000..707b4b569a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pickle5/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPickle5(PythonPackage):
+ """This package backports all features and APIs added in the pickle module
+ in Python 3.8.3, including the PEP 574 additions. It should work with Python
+ 3.5, 3.6 and 3.7."""
+
+ homepage = "https://github.com/pitrou/pickle5-backport"
+ pypi = "pickle5/pickle5-0.0.11.tar.gz"
+
+ version('0.0.11', sha256='7e013be68ba7dde1de5a8dbcc241f201dab1126e326715916ce4a26c27919ffc')
+
+ depends_on('python@3.5:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-picmistandard/package.py b/var/spack/repos/builtin/packages/py-picmistandard/package.py
index 7c850b36c1..c5c1fe6b96 100644
--- a/var/spack/repos/builtin/packages/py-picmistandard/package.py
+++ b/var/spack/repos/builtin/packages/py-picmistandard/package.py
@@ -10,16 +10,26 @@ class PyPicmistandard(PythonPackage):
"""Standard input format for Particle-In-Cell codes"""
homepage = "https://picmi-standard.github.io"
- url = "https://github.com/picmi-standard/picmi/archive/refs/tags/0.0.14.tar.gz"
git = "https://github.com/picmi-standard/picmi.git"
+ pypi = 'picmistandard/picmistandard-0.0.18.tar.gz'
maintainers = ['ax3l', 'dpgrote', 'RemiLehe']
version('develop', branch='master')
- version('0.0.14', sha256='b7eefdae1c43119984226b2df358c86fdeef7495084e47b3575e3d07e790ba30')
+ version('0.0.18', sha256='68c208c0c54b4786e133bb13eef0dd4824998da4906285987ddee84e6d195e71')
+    # 0.0.15 - 0.0.17 have broken install logic: missing requirements.txt on pypi
+ version('0.0.16', sha256='b7eefdae1c43119984226b2df358c86fdeef7495084e47b3575e3d07e790ba30',
+ url='https://github.com/picmi-standard/picmi/archive/refs/tags/0.0.14.tar.gz')
+ version('0.0.14', sha256='8f83b25b281fc0309a0c4f75c7605afd5fa0ef4df3b3ac115069478c119bc8c3')
depends_on('python@3.6:', type=('build', 'run'))
- depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-numpy@1.15:1', type=('build', 'run'))
+ depends_on('py-scipy@1.5:1', type=('build', 'run'))
depends_on('py-setuptools', type='build')
- build_directory = 'PICMI_Python'
+ @property
+ def build_directory(self):
+ if self.spec.satisfies('@develop') or self.spec.satisfies('@0.0.16'):
+ return 'PICMI_Python'
+ else:
+ return './'
diff --git a/var/spack/repos/builtin/packages/py-pika/package.py b/var/spack/repos/builtin/packages/py-pika/package.py
index 236c81c2c4..236c81c2c4 100755..100644
--- a/var/spack/repos/builtin/packages/py-pika/package.py
+++ b/var/spack/repos/builtin/packages/py-pika/package.py
diff --git a/var/spack/repos/builtin/packages/py-pillow/package.py b/var/spack/repos/builtin/packages/py-pillow/package.py
index 374b7123b4..333c6a3590 100644
--- a/var/spack/repos/builtin/packages/py-pillow/package.py
+++ b/var/spack/repos/builtin/packages/py-pillow/package.py
@@ -25,13 +25,15 @@ class PyPillowBase(PythonPackage):
# Required dependencies
# https://pillow.readthedocs.io/en/latest/installation.html#notes
- depends_on('python@3.6:3.9', when='@8:', type=('build', 'run'))
+ depends_on('python@3.7:3.10', when='@9:', type=('build', 'run'))
+ depends_on('python@3.6:3.10', when='@8.3.2:8.4', type=('build', 'run'))
+ depends_on('python@3.6:3.9', when='@8:8.3.1', type=('build', 'run'))
depends_on('python@3.5:3.8', when='@7.0:7.2', type=('build', 'run'))
depends_on('python@2.7:2.8,3.5:3.8', when='@6.2.1:6.2.2', type=('build', 'run'))
depends_on('python@2.7:2.8,3.5:3.7', when='@6.0:6.2.0', type=('build', 'run'))
depends_on('python@2.7:2.8,3.4:3.7', when='@5.2:5.4', type=('build', 'run'))
depends_on('python@2.7:2.8,3.4:3.6', when='@5.0:5.1', type=('build', 'run'))
- depends_on('python@2.7:2.8,3.3:3.6', when='@4.0:4', type=('build', 'run'))
+ depends_on('python@2.7:2.8,3.3:3.6', when='@4.0:4', type=('build', 'run'))
depends_on('python@2.6:2.8,3.2:3.5', when='@2:3', type=('build', 'run'))
depends_on('python@2.4:2.7', when='@:1', type=('build', 'run'))
depends_on('py-setuptools', type='build')
@@ -108,6 +110,7 @@ class PyPillow(PyPillowBase):
homepage = "https://python-pillow.org/"
pypi = "Pillow/Pillow-7.2.0.tar.gz"
+ version('8.4.0', sha256='b8e2f83c56e141920c39464b852de3719dfbfb6e3c99a2d8da0edf4fb33176ed')
version('8.0.0', sha256='59304c67d12394815331eda95ec892bf54ad95e0aa7bc1ccd8e0a4a5a25d4bf3')
version('7.2.0', sha256='97f9e7953a77d5a70f49b9a48da7776dc51e9b738151b22dacf101641594a626')
version('7.0.0', sha256='4d9ed9a64095e031435af120d3c910148067087541131e82b3e8db302f4c8946')
@@ -121,7 +124,10 @@ class PyPillow(PyPillowBase):
version('3.0.0', sha256='ad50bef540fe5518a4653c3820452a881b6a042cb0f8bb7657c491c6bd3654bb')
for ver in [
- '7.2.0', '7.0.0', '6.2.2', '6.2.1', '6.2.0', '6.0.0',
- '5.4.1', '5.1.0', '3.2.0', '3.0.0'
+ '8.4.0', '8.0.0',
+ '7.2.0', '7.0.0',
+ '6.2.2', '6.2.1', '6.2.0', '6.0.0',
+ '5.4.1', '5.1.0',
+ '3.2.0', '3.0.0'
]:
provides('pil@' + ver, when='@' + ver)
diff --git a/var/spack/repos/builtin/packages/py-pkgconfig/package.py b/var/spack/repos/builtin/packages/py-pkgconfig/package.py
index 0f2e7171e0..7f3270b63a 100644
--- a/var/spack/repos/builtin/packages/py-pkgconfig/package.py
+++ b/var/spack/repos/builtin/packages/py-pkgconfig/package.py
@@ -12,11 +12,12 @@ class PyPkgconfig(PythonPackage):
homepage = "https://github.com/matze/pkgconfig"
pypi = "pkgconfig/pkgconfig-1.2.2.tar.gz"
+ version('1.5.5', sha256='deb4163ef11f75b520d822d9505c1f462761b4309b1bb713d08689759ea8b899')
version('1.5.1', sha256='97bfe3d981bab675d5ea3ef259045d7919c93897db7d3b59d4e8593cba8d354f')
version('1.4.0', sha256='048c3b457da7b6f686b647ab10bf09e2250e4c50acfe6f215398a8b5e6fcdb52')
version('1.2.2', sha256='3685ba02a9b72654a764b728b559f327e1dbd7dc6ebc310a1bd429666ee202aa')
depends_on('python@2.6:2.8,3.3:', type=('build', 'run'))
depends_on('python@2.7:2.8,3.3:', when='@1.5:', type=('build', 'run'))
- depends_on('py-setuptools', when='@:1.4', type=('build', 'run'))
+ depends_on('py-setuptools', when='@:1.4,1.5.5:', type=('build', 'run'))
depends_on('pkgconfig', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pkginfo/package.py b/var/spack/repos/builtin/packages/py-pkginfo/package.py
index 7d6efa869e..77e0b16a4f 100644
--- a/var/spack/repos/builtin/packages/py-pkginfo/package.py
+++ b/var/spack/repos/builtin/packages/py-pkginfo/package.py
@@ -12,6 +12,7 @@ class PyPkginfo(PythonPackage):
homepage = "https://code.launchpad.net/~tseaver/pkginfo/trunk"
pypi = "pkginfo/pkginfo-1.5.0.1.tar.gz"
+ version('1.7.1', sha256='e7432f81d08adec7297633191bbf0bd47faf13cd8724c3a13250e51d542635bd')
version('1.5.0.1', sha256='7424f2c8511c186cd5424bbf31045b77435b37a8d604990b79d4e70d741148bb')
depends_on('py-setuptools', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-plac/package.py b/var/spack/repos/builtin/packages/py-plac/package.py
index 746e8ead71..0aabec4e89 100644
--- a/var/spack/repos/builtin/packages/py-plac/package.py
+++ b/var/spack/repos/builtin/packages/py-plac/package.py
@@ -10,6 +10,10 @@ class PyPlac(PythonPackage):
homepage = "https://github.com/micheles/plac"
pypi = "plac/plac-1.1.3.tar.gz"
+ # Skip 'plac_tk' imports
+ import_modules = ['plac', 'plac_ext', 'plac_core']
+
+ version('1.3.3', sha256='51e332dabc2aed2cd1f038be637d557d116175101535f53eaa7ae854a00f2a74')
version('1.1.3', sha256='398cb947c60c4c25e275e1f1dadf027e7096858fb260b8ece3b33bcff90d985f')
depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-platformdirs/package.py b/var/spack/repos/builtin/packages/py-platformdirs/package.py
new file mode 100644
index 0000000000..e1d0c126f0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-platformdirs/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPlatformdirs(PythonPackage):
+ """A small Python module for determining appropriate
+ platform-specific dirs, e.g. a "user data dir" """
+
+ homepage = "https://github.com/platformdirs/platformdirs"
+ pypi = "platformdirs/platformdirs-2.4.0.tar.gz"
+
+ version('2.4.0', sha256='367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools@44:', type='build')
+ depends_on('py-setuptools-scm@5:+toml', type='build')
diff --git a/var/spack/repos/builtin/packages/py-pluggy/package.py b/var/spack/repos/builtin/packages/py-pluggy/package.py
index 7c543bbdd1..1dac0042b1 100644
--- a/var/spack/repos/builtin/packages/py-pluggy/package.py
+++ b/var/spack/repos/builtin/packages/py-pluggy/package.py
@@ -12,6 +12,7 @@ class PyPluggy(PythonPackage):
homepage = "https://github.com/pytest-dev/pluggy"
pypi = "pluggy/pluggy-0.13.0.tar.gz"
+ version('1.0.0', sha256='4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159')
version('0.13.0', sha256='fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34')
version('0.12.0', sha256='0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc')
version('0.9.0', sha256='19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f')
@@ -20,6 +21,7 @@ class PyPluggy(PythonPackage):
version('0.6.0', sha256='7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff')
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
+ depends_on('python@3.6:', type=('build', 'run'), when='@1.0.0:')
depends_on('py-setuptools', type='build')
depends_on('py-setuptools-scm', type='build')
diff --git a/var/spack/repos/builtin/packages/py-ply/package.py b/var/spack/repos/builtin/packages/py-ply/package.py
index 2c1b064d76..1bb6f9947e 100644
--- a/var/spack/repos/builtin/packages/py-ply/package.py
+++ b/var/spack/repos/builtin/packages/py-ply/package.py
@@ -7,9 +7,12 @@ from spack import *
class PyPly(PythonPackage):
- """PLY is nothing more than a straightforward lex/yacc implementation."""
+ """Python Lex & Yacc."""
+
homepage = "http://www.dabeaz.com/ply"
- url = "https://www.dabeaz.com/ply/ply-3.11.tar.gz"
+ pypi = "ply/ply-3.11.tar.gz"
version('3.11', sha256='00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3')
version('3.8', sha256='e7d1bdff026beb159c9942f7a17e102c375638d9478a7ecd4cc0c76afd8de0b8')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-pmw-patched/package.py b/var/spack/repos/builtin/packages/py-pmw-patched/package.py
new file mode 100644
index 0000000000..f17bf63452
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pmw-patched/package.py
@@ -0,0 +1,14 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+class PyPmwPatched(PythonPackage):
+ """Schrodinger's Fork of Python megawidgets with essential patches applied.
+ Pmw is a toolkit for building high-level compound widgets, or
+ megawidgets, constructed using other widgets as component parts."""
+ homepage = "https://github.com/schrodinger/pmw-patched"
+ git = "https://github.com/schrodinger/pmw-patched"
+
+ version('02-10-2020', commit='8bedfc8747e7757c1048bc5e11899d1163717a43')
diff --git a/var/spack/repos/builtin/packages/py-pooch/package.py b/var/spack/repos/builtin/packages/py-pooch/package.py
index 3bdb51c1b6..07336a3751 100644
--- a/var/spack/repos/builtin/packages/py-pooch/package.py
+++ b/var/spack/repos/builtin/packages/py-pooch/package.py
@@ -14,10 +14,12 @@ class PyPooch(PythonPackage):
homepage = "https://github.com/fatiando/pooch"
pypi = "pooch/pooch-1.3.0.tar.gz"
+ version('1.5.2', sha256='5969b2f1defbdc405df932767e05e0b536e2771c27f1f95d7f260bc99bf13581')
version('1.3.0', sha256='30d448e825904e2d763bbbe418831a788813c32f636b21c8d60ee5f474532898')
depends_on('python@3.6:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools-scm', when='@1.4:', type='build')
depends_on('py-requests', type=('build', 'run'))
depends_on('py-packaging', type=('build', 'run'))
depends_on('py-appdirs', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-prettytable/package.py b/var/spack/repos/builtin/packages/py-prettytable/package.py
index cad349dc3e..21ca40def5 100644
--- a/var/spack/repos/builtin/packages/py-prettytable/package.py
+++ b/var/spack/repos/builtin/packages/py-prettytable/package.py
@@ -12,9 +12,14 @@ class PyPrettytable(PythonPackage):
appealing ASCII tables.
"""
- homepage = "https://code.google.com/archive/p/prettytable/"
+ homepage = "https://github.com/jazzband/prettytable"
pypi = "prettytable/prettytable-0.7.2.tar.gz"
+ version('2.4.0', sha256='18e56447f636b447096977d468849c1e2d3cfa0af8e7b5acfcf83a64790c0aca')
version('0.7.2', sha256='2d5460dc9db74a32bcc8f9f67de68b2c4f4d2f01fa3bd518764c69156d9cacd9')
depends_on("py-setuptools", type='build')
+ depends_on("py-wcwidth", type=('build', 'run'), when='@2.4.0:')
+ depends_on("py-importlib-metadata", type=('build', 'run'), when='@2.4.0: ^python@:3.7')
+ depends_on("py-setuptools-scm", type='build', when='@2.4.0:')
+ depends_on("python@3.6:", type=('build', 'run'), when='@2.4.0:')
diff --git a/var/spack/repos/builtin/packages/py-progressbar2/package.py b/var/spack/repos/builtin/packages/py-progressbar2/package.py
index b64ab06dff..70fc7e58ff 100644
--- a/var/spack/repos/builtin/packages/py-progressbar2/package.py
+++ b/var/spack/repos/builtin/packages/py-progressbar2/package.py
@@ -13,6 +13,7 @@ class PyProgressbar2(PythonPackage):
homepage = "https://github.com/WoLpH/python-progressbar"
pypi = "progressbar2/progressbar2-3.50.1.tar.gz"
+ version('3.55.0', sha256='86835d1f1a9317ab41aeb1da5e4184975e2306586839d66daf63067c102f8f04')
version('3.50.1', sha256='2c21c14482016162852c8265da03886c2b4dea6f84e5a817ad9b39f6bd82a772')
version('3.39.3', sha256='8e5b5419e04193bb7c3fea71579937bbbcd64c26472b929718c2fe7ec420fe39')
diff --git a/var/spack/repos/builtin/packages/py-prometheus-client/package.py b/var/spack/repos/builtin/packages/py-prometheus-client/package.py
index 9bd8df3a64..160a5c2c09 100644
--- a/var/spack/repos/builtin/packages/py-prometheus-client/package.py
+++ b/var/spack/repos/builtin/packages/py-prometheus-client/package.py
@@ -11,6 +11,7 @@ class PyPrometheusClient(PythonPackage):
pypi = "prometheus_client/prometheus_client-0.7.1.tar.gz"
+ version('0.12.0', sha256='1b12ba48cee33b9b0b9de64a1047cbd3c5f2d0ab6ebcead7ddda613a750ec3c5')
version('0.7.1', sha256='71cd24a2b3eb335cb800c7159f423df1bd4dcd5171b234be15e3f31ec9f622da')
version('0.7.0', sha256='ee0c90350595e4a9f36591f291e6f9933246ea67d7cd7d1d6139a9781b14eaae')
version('0.5.0', sha256='e8c11ff5ca53de6c3d91e1510500611cafd1d247a937ec6c588a0a7cc3bef93c')
@@ -18,7 +19,11 @@ class PyPrometheusClient(PythonPackage):
variant('twisted', default=False, description='Expose metrics as a twisted resource')
depends_on('py-setuptools', type='build')
+ # Notice: prometheus_client/twisted/_exposition.py imports 'twisted.web.wsgi'
+ # which was not ported to Python 3 until twisted 16.0.0
depends_on('py-twisted', type=('build', 'run'), when='+twisted')
+ depends_on('py-twisted@16:', type=('build', 'run'), when='@0.12.0: +twisted ^python@3:')
+ depends_on('python@2.7:2,3.4:', type=('build', 'run'), when='@0.12.0:')
@property
def import_modules(self):
diff --git a/var/spack/repos/builtin/packages/py-prompt-toolkit/package.py b/var/spack/repos/builtin/packages/py-prompt-toolkit/package.py
index 7432627758..3efd6cd5b3 100644
--- a/var/spack/repos/builtin/packages/py-prompt-toolkit/package.py
+++ b/var/spack/repos/builtin/packages/py-prompt-toolkit/package.py
@@ -26,6 +26,7 @@ class PyPromptToolkit(PythonPackage):
'prompt_toolkit.clipboard'
]
+ version('3.0.24', sha256='1bb05628c7d87b645974a1bad3f17612be0c29fa39af9f7688030163f680bad6')
version('3.0.17', sha256='9397a7162cf45449147ad6042fa37983a081b8a73363a5253dd4072666333137')
version('3.0.16', sha256='0fa02fa80363844a4ab4b8d6891f62dd0645ba672723130423ca4037b80c1974')
version('3.0.7', sha256='822f4605f28f7d2ba6b0b09a31e25e140871e96364d1d377667b547bb3bf4489')
@@ -34,7 +35,8 @@ class PyPromptToolkit(PythonPackage):
version('1.0.16', sha256='c1cedd626e08b8ee830ee65897de754113ff3f3035880030c08b01674d85c5b4')
version('1.0.9', sha256='cd6523b36adc174cc10d54b1193eb626b4268609ff6ea92c15bcf1996609599c')
- depends_on('python@3.6.1:', when='@3:', type=('build', 'run'))
+ depends_on('python@3.6.2:', when='@3.0.24:', type=('build', 'run'))
+ depends_on('python@3.6.1:', when='@3:3.0.17', type=('build', 'run'))
depends_on('python@2.6:2.8,3.3:', when='@:2', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-six@1.9.0:', when='@:2', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-prwlock/package.py b/var/spack/repos/builtin/packages/py-prwlock/package.py
new file mode 100644
index 0000000000..6f7ed7c41c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-prwlock/package.py
@@ -0,0 +1,17 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPrwlock(PythonPackage):
+ """Native process-shared rwlock support for Python"""
+
+ homepage = "https://github.com/renatolfc/prwlock"
+ pypi = "prwlock/prwlock-0.4.1.tar.gz"
+
+ version('0.4.1', sha256='a2fa773cb877207ae5b54c7cf5d224b0215c9f7b9ef16a88d33eadc5c9e1466e')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-py/package.py b/var/spack/repos/builtin/packages/py-py/package.py
index aa7ff323a5..9c398085a8 100644
--- a/var/spack/repos/builtin/packages/py-py/package.py
+++ b/var/spack/repos/builtin/packages/py-py/package.py
@@ -9,9 +9,10 @@ from spack import *
class PyPy(PythonPackage):
"""Library with cross-python path, ini-parsing, io, code, log facilities"""
- homepage = "https://pylib.readthedocs.io/en/latest/"
+ homepage = "https://py.readthedocs.io/en/latest/"
pypi = "py/py-1.8.0.tar.gz"
+ version('1.11.0', sha256='51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719')
version('1.9.0', sha256='9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342')
version('1.8.2', sha256='f3b3a4c36512a4c4f024041ab51866f11761cc169670204b235f6b20523d4e6b')
version('1.8.0', sha256='dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53')
@@ -21,6 +22,8 @@ class PyPy(PythonPackage):
version('1.4.31', sha256='a6501963c725fc2554dabfece8ae9a8fb5e149c0ac0a42fd2b02c5c1c57fc114')
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
+ depends_on('python@2.7:2.8,3.5:', type=('build', 'run'), when='@1.11.0:')
depends_on('py-setuptools', type='build')
depends_on('py-setuptools-scm', type='build')
+ depends_on('py-setuptools-scm+toml', type='build', when='@1.11.0:')
diff --git a/var/spack/repos/builtin/packages/py-pyaml/package.py b/var/spack/repos/builtin/packages/py-pyaml/package.py
new file mode 100644
index 0000000000..148c42f1f2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pyaml/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPyaml(PythonPackage):
+ """PyYAML-based python module to produce pretty and readable
+ YAML-serialized data."""
+
+ maintainers = ['Kerilk', 'liuyangzhuan']
+
+ homepage = "https://github.com/mk-fg/pretty-yaml"
+ pypi = "pyaml/pyaml-21.8.3.tar.gz"
+
+ version('21.8.3', sha256='a1636d63c476328a07213d0b7111bb63570f1ab8a3eddf60522630250c23d975')
+
+ depends_on('python@2.7:2,3.5:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-pyyaml', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pyasn1-modules/package.py b/var/spack/repos/builtin/packages/py-pyasn1-modules/package.py
index 6fc74662ff..cc338e4cb1 100644
--- a/var/spack/repos/builtin/packages/py-pyasn1-modules/package.py
+++ b/var/spack/repos/builtin/packages/py-pyasn1-modules/package.py
@@ -14,10 +14,11 @@ class PyPyasn1Modules(PythonPackage):
homepage = "https://github.com/etingof/pyasn1-modules"
pypi = "pyasn1-modules/pyasn1-modules-0.2.6.tar.gz"
+ version('0.2.8', sha256='905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e')
version('0.2.6', sha256='43c17a83c155229839cc5c6b868e8d0c6041dba149789b6d6e28801c64821722')
version('0.2.5', sha256='ef721f68f7951fab9b0404d42590f479e30d9005daccb1699b0a51bb4177db96')
depends_on('python@2.4:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
- depends_on('py-pyasn1@0.4.6:0.4', type=('build', 'run'), when='@0.2.6')
+ depends_on('py-pyasn1@0.4.6:0.4', type=('build', 'run'), when='@0.2.6:')
depends_on('py-pyasn1@0.4.1:0.4', type=('build', 'run'), when='@0.2.5')
diff --git a/var/spack/repos/builtin/packages/py-pyasn1/package.py b/var/spack/repos/builtin/packages/py-pyasn1/package.py
index 7b982c014d..ae25015893 100644
--- a/var/spack/repos/builtin/packages/py-pyasn1/package.py
+++ b/var/spack/repos/builtin/packages/py-pyasn1/package.py
@@ -14,6 +14,7 @@ class PyPyasn1(PythonPackage):
homepage = "https://github.com/etingof/pyasn1"
pypi = "pyasn1/pyasn1-0.4.6.tar.gz"
+ version('0.4.8', sha256='aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba')
version('0.4.6', sha256='b773d5c9196ffbc3a1e13bdf909d446cad80a039aa3340bcad72f395b76ebc86')
version('0.4.5', sha256='da2420fe13a9452d8ae97a0e478adde1dee153b11ba832a95b223a2ba01c10f7')
version('0.2.3', sha256='738c4ebd88a718e700ee35c8d129acce2286542daa80a82823a7073644f706ad')
diff --git a/var/spack/repos/builtin/packages/py-pybids/package.py b/var/spack/repos/builtin/packages/py-pybids/package.py
index cffbc78446..f1a3a28d4f 100644
--- a/var/spack/repos/builtin/packages/py-pybids/package.py
+++ b/var/spack/repos/builtin/packages/py-pybids/package.py
@@ -10,12 +10,14 @@ class PyPybids(PythonPackage):
"""bids: interface with datasets conforming to BIDS"""
homepage = "https://github.com/bids-standard/pybids"
- pypi = "pybids/pybids-0.13.1.tar.gz"
+ pypi = "pybids/pybids-0.13.1.tar.gz"
+ version('0.14.0', sha256='73c4d03aad333f2a7cb4405abe96f55a33cffa4b5a2d23fad6ac5767c45562ef')
version('0.13.2', sha256='9692013af3b86b096b5423b88179c6c9b604baff5a6b6f89ba5f40429feb7a3e')
version('0.13.1', sha256='c920e1557e1dae8b671625d70cafbdc28437ba2822b2db9da4c2587a7625e3ba')
version('0.9.5', sha256='0e8f8466067ff3023f53661c390c02702fcd5fe712bdd5bf167ffb0c2b920430')
+ depends_on('python@3.6:', when='@0.14:', type=('build', 'run'))
depends_on('python@3.5:', when='@0.10:', type=('build', 'run'))
depends_on('python@2.7:2,3.5:', type=('build', 'run'))
depends_on('py-setuptools@30.3.0:', type='build')
@@ -23,9 +25,11 @@ class PyPybids(PythonPackage):
depends_on('py-scipy', type=('build', 'run'))
depends_on('py-nibabel@2.1:', type=('build', 'run'))
depends_on('py-pandas@0.23:', type=('build', 'run'))
- depends_on('py-patsy', type=('build', 'run'))
+ depends_on('py-formulaic@0.2.4:0.2', when='@0.14:', type=('build', 'run'))
depends_on('py-sqlalchemy@:1.3', when='@0.12.4:', type=('build', 'run'))
depends_on('py-sqlalchemy', type=('build', 'run'))
depends_on('py-bids-validator', type=('build', 'run'))
depends_on('py-num2words', type=('build', 'run'))
depends_on('py-click', when='@0.12.1:', type=('build', 'run'))
+
+ depends_on('py-patsy', when='@:0.13', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pybind11/package.py b/var/spack/repos/builtin/packages/py-pybind11/package.py
index a1ebcec3da..43359ee039 100644
--- a/var/spack/repos/builtin/packages/py-pybind11/package.py
+++ b/var/spack/repos/builtin/packages/py-pybind11/package.py
@@ -45,6 +45,7 @@ class PyPybind11(CMakePackage, PythonPackage):
depends_on('py-setuptools', type='build')
depends_on('py-pytest', type='test')
depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
+ depends_on('cmake@3.13:', type='build')
# compiler support
conflicts('%gcc@:4.7')
diff --git a/var/spack/repos/builtin/packages/py-pybrain/package.py b/var/spack/repos/builtin/packages/py-pybrain/package.py
new file mode 100644
index 0000000000..f102e79246
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pybrain/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPybrain(PythonPackage):
+ """PyBrain is the Swiss army knife for neural networking."""
+
+ homepage = "http://pybrain.org/"
+
+ url = "https://github.com/pybrain/pybrain/archive/refs/tags/0.3.3.tar.gz"
+ git = "https://github.com/pybrain/pybrain.git"
+
+ version('0.3.3.post', commit='dcdf32b')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-scipy', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pycortex/package.py b/var/spack/repos/builtin/packages/py-pycortex/package.py
new file mode 100644
index 0000000000..158c600114
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pycortex/package.py
@@ -0,0 +1,53 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPycortex(PythonPackage):
+ """Python Cortical mapping software for fMRI data."""
+
+ # When pycortex is started it creates a user config file (on linux located
+ # in ~/.config/pycortex) which can be problematic when reinstalling a newer
+ # version with spack due to hardscoded absolute paths of the pycortex module
+
+ homepage = "https://github.com/gallantlab/pycortex"
+ pypi = "pycortex/pycortex-1.2.2.tar.gz"
+
+ version('1.2.2', sha256='ac46ed6a1dc727c3126c2b5d7916fc0ac21a6510c32a5edcd3b8cfb7b2128414')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-cython', type='build')
+ depends_on('py-future', type=('build', 'run'))
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-scipy', type=('build', 'run'))
+ depends_on('py-tornado@4.3:', type=('build', 'run'))
+ depends_on('py-shapely', type=('build', 'run'))
+ depends_on('py-lxml', type=('build', 'run'))
+ depends_on('py-html5lib', type=('build', 'run'))
+ depends_on('py-h5py', type=('build', 'run'))
+ depends_on('py-numexpr', type=('build', 'run'))
+ depends_on('py-cython', type=('build', 'run')) # is in install_requires
+ depends_on('py-matplotlib', type=('build', 'run'))
+ depends_on('pil', type=('build', 'run'))
+ depends_on('py-nibabel', type=('build', 'run'))
+ depends_on('py-networkx@2.1:', type=('build', 'run'))
+ depends_on('py-imageio', type=('build', 'run'))
+ depends_on('py-wget', type=('build', 'run'))
+
+ # inkscape is not in spack
+    # TODO remove this patch and add an inkscape dependency once it is in spack
+ def patch(self):
+ # remove inkscape dependency
+ filter_file('from .testing_utils import INKSCAPE_VERSION', '',
+ 'cortex/utils.py',
+ string=True)
+ filter_file('open_inkscape=True', 'open_inkscape=False',
+ 'cortex/utils.py',
+ string=True)
+ filter_file('from .testing_utils import INKSCAPE_VERSION',
+ 'INKSCAPE_VERSION = None',
+ 'cortex/svgoverlay.py',
+ string=True)
diff --git a/var/spack/repos/builtin/packages/py-pycuda/package.py b/var/spack/repos/builtin/packages/py-pycuda/package.py
index 34da30d1dd..c26e6afb20 100644
--- a/var/spack/repos/builtin/packages/py-pycuda/package.py
+++ b/var/spack/repos/builtin/packages/py-pycuda/package.py
@@ -14,6 +14,7 @@ class PyPycuda(PythonPackage):
homepage = "https://mathema.tician.de/software/pycuda/"
pypi = "pycuda/pycuda-2019.1.2.tar.gz"
+ version('2021.1', sha256='ab87312d0fc349d9c17294a087bb9615cffcf966ad7b115f5b051008a48dd6ed')
version('2020.1', sha256='effa3b99b55af67f3afba9b0d1b64b4a0add4dd6a33bdd6786df1aa4cc8761a5')
version('2019.1.2', sha256='ada56ce98a41f9f95fe18809f38afbae473a5c62d346cfa126a2d5477f24cc8a')
version('2016.1.2', sha256='a7dbdac7e2f0c0d2ad98f5f281d5a9d29d6673b3c20210e261b96e9a2d0b6e37')
@@ -36,8 +37,8 @@ class PyPycuda(PythonPackage):
depends_on('python@3.6:3', type=('build', 'run'), when='@2020.1:')
depends_on('py-numpy@1.6:', type=('build', 'run'))
depends_on('py-pytools@2011.2:', type=('build', 'run'))
- depends_on('py-six', type='run')
- depends_on('py-decorator@3.2.0:', type=('build', 'run'))
+ depends_on('py-six', type='run', when='@:2020.1')
+ depends_on('py-decorator@3.2.0:', type=('build', 'run'), when='@:2020.1')
depends_on('py-appdirs@1.4.0:', type=('build', 'run'))
depends_on('py-mako', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pycurl/package.py b/var/spack/repos/builtin/packages/py-pycurl/package.py
index b82974139b..7471808456 100644
--- a/var/spack/repos/builtin/packages/py-pycurl/package.py
+++ b/var/spack/repos/builtin/packages/py-pycurl/package.py
@@ -13,7 +13,10 @@ class PyPycurl(PythonPackage):
homepage = "http://pycurl.io/"
pypi = "pycurl/pycurl-7.43.0.tar.gz"
+ version('7.44.1', sha256='5bcef4d988b74b99653602101e17d8401338d596b9234d263c728a0c3df003e8')
version('7.43.0', sha256='aa975c19b79b6aa6c0518c0cc2ae33528900478f0b500531dbcdbf05beec584c')
depends_on('python@2.6:')
+ depends_on('python@3.5:', when='@7.44.1:')
+ depends_on('py-setuptools', when='@7.44.1:', type='build')
depends_on('curl@7.19.0:')
diff --git a/var/spack/repos/builtin/packages/py-pygdal/package.py b/var/spack/repos/builtin/packages/py-pygdal/package.py
index 97d91b4f14..97eb636f41 100644
--- a/var/spack/repos/builtin/packages/py-pygdal/package.py
+++ b/var/spack/repos/builtin/packages/py-pygdal/package.py
@@ -27,8 +27,6 @@ class PyPygdal(PythonPackage):
version('2.4.2.5', sha256='73386683c0b10ab43b6d64257fca2ba812f53ec61b268de8811565fd9ae9bacd')
version('2.4.1.6', sha256='5d1af98ad09f59e34e3b332cf20630b532b33c7120295aaaabbccebf58a11aa4')
version('2.4.0.6', sha256='728d11f3ecae0cd3493cd27dab599a0b6184f5504cc172d49400d88ea2b24a9c')
- version('1.11.5.3', sha256='746d13b73a284446a1b604772f869789eabfe6e69dee463f537da27845b29fa7')
- version('1.11.4.3', sha256='99d4b0c94d57ae50592924faaa65cc6a0c0892d83764e9f24ef9270c3a4b111a')
depends_on('python@3.6:', when='@3.3:', type='build')
depends_on('py-setuptools', type='build')
@@ -41,5 +39,3 @@ class PyPygdal(PythonPackage):
depends_on('gdal@2.4.2', type=('build', 'link', 'run'), when='@2.4.2.5')
depends_on('gdal@2.4.1', type=('build', 'link', 'run'), when='@2.4.1.6')
depends_on('gdal@2.4.0', type=('build', 'link', 'run'), when='@2.4.0.6')
- depends_on('gdal@1.11.5', type=('build', 'link', 'run'), when='@1.11.5.3')
- depends_on('gdal@1.11.4', type=('build', 'link', 'run'), when='@1.11.4.3')
diff --git a/var/spack/repos/builtin/packages/py-pygraphviz/package.py b/var/spack/repos/builtin/packages/py-pygraphviz/package.py
new file mode 100644
index 0000000000..072d36aa1f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pygraphviz/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPygraphviz(PythonPackage):
+ """Python interface to Graphviz"""
+
+ homepage = "https://pygraphviz.github.io/"
+ pypi = "pygraphviz/pygraphviz-1.7.zip"
+
+ maintainers = ['haralmha']
+
+ version('1.7', sha256='a7bec6609f37cf1e64898c59f075afd659106cf9356c5f387cecaa2e0cdb2304')
+
+ depends_on('python@3.7:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('graphviz')
diff --git a/var/spack/repos/builtin/packages/py-pylint/package.py b/var/spack/repos/builtin/packages/py-pylint/package.py
index d474418ae2..f0fd2fefd3 100644
--- a/var/spack/repos/builtin/packages/py-pylint/package.py
+++ b/var/spack/repos/builtin/packages/py-pylint/package.py
@@ -11,6 +11,12 @@ class PyPylint(PythonPackage):
pypi = "pylint/pylint-1.6.5.tar.gz"
+ import_modules = ['pylint', 'pylint.lint', 'pylint.extensions',
+ 'pylint.config', 'pylint.checkers', 'pylint.checkers.refactoring',
+ 'pylint.message', 'pylint.utils', 'pylint.pyreverse',
+ 'pylint.reporters', 'pylint.reporters.ureports']
+
+ version('2.11.1', sha256='2c9843fff1a88ca0ad98a256806c82c5a8f86086e7ccbdb93297d86c3f90c436')
version('2.8.2', sha256='586d8fa9b1891f4b725f587ef267abe2a1bad89d6b184520c7f07a253dd6e217')
version('2.3.1', sha256='723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1')
version('2.3.0', sha256='ee80c7af4f127b2a480d83010c9f0e97beb8eaa652b78c2837d3ed30b12e1182')
@@ -30,7 +36,8 @@ class PyPylint(PythonPackage):
depends_on('py-astroid@1.6:1.9', type=('build', 'run'), when='@1.9.4')
depends_on('py-astroid@2.0:', type=('build', 'run'), when='@2.2.0:')
depends_on('py-astroid@2.2.0:2', type=('build', 'run'), when='@2.3.0:2.7')
- depends_on('py-astroid@2.5.6:2.6', type=('build', 'run'), when='@2.8.0:')
+ depends_on('py-astroid@2.5.6:2.6', type=('build', 'run'), when='@2.8.0:2.10')
+ depends_on('py-astroid@2.8.0:2.8', type=('build', 'run'), when='@2.11.1:')
depends_on('py-backports-functools-lru-cache', when='^python@:2.8', type=('build', 'run'))
depends_on('py-configparser', when='^python@:2.8', type=('build', 'run'))
@@ -46,3 +53,5 @@ class PyPylint(PythonPackage):
depends_on('py-singledispatch', when='^python@:3.3', type=('build', 'run'))
depends_on('py-six', type=('build', 'run'), when='@1:2.3.1')
depends_on('py-toml@0.7.1:', type=('build', 'run'), when='@2.8.2:')
+ depends_on('py-platformdirs@2.2.0:', type=('build', 'run'), when='@2.11.1:')
+ depends_on('py-typing-extensions@3.10.0:', type=('build', 'run'), when='@2.11.1: ^python@:3.9')
diff --git a/var/spack/repos/builtin/packages/py-pymol/package.py b/var/spack/repos/builtin/packages/py-pymol/package.py
index 557caf7692..f62e0c76ab 100644
--- a/var/spack/repos/builtin/packages/py-pymol/package.py
+++ b/var/spack/repos/builtin/packages/py-pymol/package.py
@@ -15,29 +15,43 @@ class PyPymol(PythonPackage):
homepage = "https://pymol.org"
url = "https://github.com/schrodinger/pymol-open-source/archive/v2.4.0.tar.gz"
+ version('2.5.0', sha256='aa828bf5719bd9a14510118a93182a6e0cadc03a574ba1e327e1e9780a0e80b3')
version('2.4.0', sha256='5ede4ce2e8f53713c5ee64f5905b2d29bf01e4391da7e536ce8909d6b9116581')
version('2.3.0', sha256='62aa21fafd1db805c876f89466e47513809f8198395e1f00a5f5cc40d6f40ed0')
- depends_on('python+tkinter', type=('build', 'run'))
- depends_on('freetype', type=('build', 'run'))
- depends_on('glew', type=('build'))
- depends_on('glm', type=('build'))
- depends_on('py-numpy', type=('build', 'run'))
- depends_on('tcsh', type=('build', 'run'))
+ depends_on('python+tkinter@2.7:', type=('build', 'run'), when='@2.3.0:2.4.0')
+ depends_on('python+tkinter@3.6:', type=('build', 'run'), when='@2.5.0:')
+ depends_on('gl')
+ depends_on('glew')
+ depends_on('libpng')
+ depends_on('freetype')
+ depends_on('glm')
+ depends_on('libmmtf-cpp')
+ depends_on('msgpack-c@2.1.5:')
+ depends_on('netcdf-cxx4')
+ depends_on('libxml2')
+ depends_on('py-pmw-patched', type=('build', 'run'))
depends_on('py-pyqt5', type=('build', 'run'))
- depends_on('py-pmw', type=('build', 'run'))
- depends_on('libmmtf-cpp', type=('build', 'run', 'link'))
- depends_on('msgpack-c', type=('build', 'run'))
- depends_on('libpng@1.5.13', type=('build', 'run'))
-
- def setup_build_environment(self, env):
- include = []
- library = []
- for dep in self.spec.dependencies(deptype='link'):
- query = self.spec[dep.name]
- include.extend(query.headers.directories)
-
- env.set('CPATH', ':'.join(include))
- env.set('LIBRARY_PATH', ':'.join(library))
- env.set('PREFIX_PATH', self.spec['libpng'].prefix)
- env.prepend_path('PREFIX_PATH', self.spec['py-pyqt5'].prefix)
+ depends_on('py-numpy', type=('build', 'run'))
+
+ def install_args(self, spec, prefix):
+ args = super(PyPymol, self).install_args(spec, prefix)
+ args.append('--no-launcher')
+ return args
+
+ @run_after('install')
+ def install_launcher(self):
+ binpath = self.prefix.bin
+ mkdirp(self.prefix.bin)
+ fname = join_path(binpath, 'pymol')
+ script = join_path(self.prefix,
+ self.spec['python'].package.site_packages_dir,
+ 'pymol',
+ '__init__.py')
+
+ shebang = '#!/bin/sh\n'
+ fdata = 'exec {0} {1} \"$@\"'.format(self.spec['python'].command,
+ script)
+ with open(fname, 'w') as new:
+ new.write(shebang + fdata)
+ set_executable(fname)
diff --git a/var/spack/repos/builtin/packages/py-pymongo/package.py b/var/spack/repos/builtin/packages/py-pymongo/package.py
index 5804ffc1be..5d96981b42 100644
--- a/var/spack/repos/builtin/packages/py-pymongo/package.py
+++ b/var/spack/repos/builtin/packages/py-pymongo/package.py
@@ -17,6 +17,7 @@ class PyPymongo(PythonPackage):
pypi = "pymongo/pymongo-3.9.0.tar.gz"
+ version('3.12.1', sha256='704879b6a54c45ad76cea7c6789c1ae7185050acea7afd15b58318fa1932ed45')
version('3.9.0', sha256='4249c6ba45587b959292a727532826c5032d59171f923f7f823788f413c2a5a3')
version('3.6.0', sha256='c6de26d1e171cdc449745b82f1addbc873d105b8e7335097da991c0fc664a4a8')
version('3.3.0', sha256='3d45302fc2622fabf34356ba274c69df41285bac71bbd229f1587283b851b91e')
diff --git a/var/spack/repos/builtin/packages/py-pynisher/package.py b/var/spack/repos/builtin/packages/py-pynisher/package.py
new file mode 100644
index 0000000000..2892b1fa77
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pynisher/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPynisher(PythonPackage):
+ """A small Python library to limit the resources used by a
+ function by executing it inside a subprocess."""
+
+ homepage = "https://github.com/automl/pynisher"
+ pypi = "pynisher/pynisher-0.6.4.tar.gz"
+
+ version('0.6.4', sha256='111d91aad471375c0509a912415ff90053ef909100facf412511383af107c124')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-psutil', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pyparsing/package.py b/var/spack/repos/builtin/packages/py-pyparsing/package.py
index 76ad6fb0eb..c819e3d384 100644
--- a/var/spack/repos/builtin/packages/py-pyparsing/package.py
+++ b/var/spack/repos/builtin/packages/py-pyparsing/package.py
@@ -11,6 +11,7 @@ class PyPyparsing(PythonPackage):
homepage = "https://pyparsing-docs.readthedocs.io/en/latest/"
pypi = "pyparsing/pyparsing-2.4.2.tar.gz"
+ version('3.0.6', sha256='d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81')
version('2.4.7', sha256='c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1')
version('2.4.2', sha256='6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80')
version('2.4.0', sha256='1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a')
@@ -19,11 +20,8 @@ class PyPyparsing(PythonPackage):
version('2.1.10', sha256='811c3e7b0031021137fc83e051795025fcb98674d07eb8fe922ba4de53d39188')
version('2.0.3', sha256='06e729e1cbf5274703b1f47b6135ed8335999d547f9d8cf048b210fb8ebf844f')
- patch('setuptools-import.patch', when='@:2.1.10')
-
+ depends_on('python@3.6:', when='@3:', type=('build', 'run'))
depends_on('python@2.6:2.8,3.3:', type=('build', 'run'))
- # Newer versions of setuptools require pyparsing. Although setuptools is an
- # optional dependency of pyparsing, if it is not found, setup.py will
- # fallback on distutils.core instead. Don't add a setuptools dependency
- # or we won't be able to bootstrap setuptools.
- depends_on('py-setuptools', type='build', when='@2.3.1:')
+ depends_on('py-setuptools', when='@2.1:', type='build')
+
+ import_modules = ['pyparsing']
diff --git a/var/spack/repos/builtin/packages/py-pyparsing/setuptools-import.patch b/var/spack/repos/builtin/packages/py-pyparsing/setuptools-import.patch
deleted file mode 100644
index a4ba828c23..0000000000
--- a/var/spack/repos/builtin/packages/py-pyparsing/setuptools-import.patch
+++ /dev/null
@@ -1,20 +0,0 @@
-diff --git a/setup.py b/setup.py
-index 82061c6..ff342af 100644
---- a/setup.py
-+++ b/setup.py
-@@ -1,7 +1,13 @@
- #!/usr/bin/env python
-
- """Setup script for the pyparsing module distribution."""
--from setuptools import setup
-+
-+# Setuptools depends on pyparsing (via packaging) as of version 34, so allow
-+# installing without it to avoid bootstrap problems.
-+try:
-+ from setuptools import setup
-+except ImportError:
-+ from distutils.core import setup
-
- import sys
- import os
-
diff --git a/var/spack/repos/builtin/packages/py-pyqt-builder/package.py b/var/spack/repos/builtin/packages/py-pyqt-builder/package.py
new file mode 100644
index 0000000000..1f27e4197a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pyqt-builder/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPyqtBuilder(PythonPackage):
+ """The PEP 517 compliant PyQt build system."""
+
+ homepage = "https://www.riverbankcomputing.com/hg/PyQt-builder/"
+ pypi = "PyQt-builder/PyQt-builder-1.12.2.tar.gz"
+
+ version('1.12.2', sha256='f62bb688d70e0afd88c413a8d994bda824e6cebd12b612902d1945c5a67edcd7')
+
+ depends_on('python@3.5:', type=('build', 'run'))
+ depends_on('py-setuptools@30.3:', type='build')
+ depends_on('py-packaging', type=('build', 'run'))
+ depends_on('py-sip@6.3:6', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pyro4/package.py b/var/spack/repos/builtin/packages/py-pyro4/package.py
new file mode 100644
index 0000000000..fa58bb13a7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pyro4/package.py
@@ -0,0 +1,85 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+# Package automatically generated using 'pip2spack' converter
+
+
+class PyPyro4(PythonPackage):
+ """
+ distributed object middleware for Python (RPC)
+ """
+
+ homepage = "http://pyro4.readthedocs.io"
+ pypi = 'Pyro4/Pyro4-4.81.tar.gz'
+ maintainers = ['liuyangzhuan']
+
+ version('4.81', sha256='e130da06478b813173b959f7013d134865e07fbf58cc5f1a2598f99479cdac5f')
+ version('4.80', sha256='46847ca703de3f483fbd0b2d22622f36eff03e6ef7ec7704d4ecaa3964cb2220')
+ version('4.79', sha256='b1eb34c9a1e63f731ca480f3e2c48169341a25a7504397badbaaab07e0f3241e')
+ version('4.78', sha256='b69200747c4c69bfa6fa8b917806b0a9ee7939daaf67ab9bb5ccac7e5179feee')
+ version('4.77', sha256='2bfe12a22f396474b0e57c898c7e2c561a8f850bf2055d8cf0f7119f0c7a523f')
+ version('4.76', sha256='ac1fda8d3fd9b5ff2cb8e7e400f95a1b1ae28c5df1aa82d1833a5a898e476334')
+ version('4.75', sha256='3897c0254046d4cb412a4d1a8f2f9c2c1c1ae643a24db07d0abdb51acdb8d7b5')
+ version('4.74', sha256='89ed7b12c162e5124f322f992f9506c44f5e1a379926cf01ee73ef810d3bf75f')
+ version('4.73', sha256='536b07a097d0619e7ab1effa3747fda177a24168d17a07a93ca9ac30977608f7')
+ version('4.72', sha256='2766b53db49f70b0d047fa6871aeb47484ba7e50cf53cfa37d26f87742c0b6a8')
+ version('4.71', sha256='78b686b584c180061fe3cfc3adcad4da46b3a7f42be1f9f0d7491cd006541cf3')
+ version('4.70', sha256='614dc4a7a79a861ee15215a6e60081950b2790b7b5cc91555ebeec75d8444aa5')
+ version('4.63', sha256='67d2b34156619ba37e92100af95aade8129dd2b7327eb05821d43887451f7d7b')
+ version('4.62', sha256='e301edfb2bc47768b7222a68cae8de8be796d1d9f61cdbd1af9039985ed5009c')
+ version('4.61', sha256='c465cb2ea2a90b887988d4249de8c0566bdfb16101fdc570e07e598a92e94d1e')
+ version('4.60', sha256='52fa5fe8173d234f57b6ca3214df3f34e88356c94081685db6249bff8f0b4f7f')
+ version('4.59', sha256='6a39dadbd2a83b6fd5ab7f5402f8a4befd467b5c0404b8610a8797f748b72a38')
+ version('4.58', sha256='2c6d133bcec6039a681475bc878ec98c598ccd33105c1994c7b5217932ee2c0c')
+ version('4.57', sha256='fb3bf07951c2942b5f955770d50c0152565f0da79a2c1a359cfe2062fe0a82b2')
+ version('4.56', sha256='a80c27e1debbd8d8725ee4a8f0d30cf831dde5e80b04bfa9c912932c4c13d6aa')
+ version('4.55', sha256='49a7a142542d87dde1cecc8d3ee048ec9481ba861d61234d219fadd06e6ced96')
+ version('4.54', sha256='aede879916c0f6e84e560b38af421c24cb5089b66c8f632aa5ac48b20ecde93a')
+ version('4.53', sha256='c6ca6461472a74a7608a2247413b66e951889351fcf8e9eed5d7232ae844b702')
+ version('4.52', sha256='449f4bdf8dcbaca90e6436eb40c4e860b0de47346e2c7735d0584496d28451e5')
+ version('4.51', sha256='d6508b8c70d612356a8ddbe486890b03d840c37b5f7cd8e9366bc4c0dd44d3e6')
+ version('4.50', sha256='cb199540c2ceae9d67d5f2b20dc002d93f909d5072c3da4381c119d7a4b6d1cf')
+ version('4.49', sha256='6ae7fb0ce9ae5ca6f1d32487d8606219e7296ae7d22e650e7f9db63399608b76')
+ version('4.48', sha256='3115def913cf6035000047bb270efefb55a25449a17ed392afde6fd531c82fd2')
+ version('4.47', sha256='9354b722f9f5965ade5839241c8d7ff06ec2fac678a2c9e197a63966da241c89')
+ version('4.46', sha256='165ed717275217448d786f9c15777eca889f5344d54eef9482996dfee01b668b')
+ version('4.45', sha256='e32d3f32e52d84e3456c0d389a115b5430a8bb14dd01336c627355a2f34dba78')
+ version('4.43', sha256='b6f924fa74f21d14c851450e157711914a402bfc2f3a880c1b2c275fd4cda6d6')
+ version('4.42', sha256='03951643015a1537ad82fbf99fba6e208007447404aab1a020dce7216120d32a')
+ version('4.41', sha256='3af4749140e9d4032632277ac19e7fd4761856d2df0f0643c574d1e7174a9703')
+ version('4.40', sha256='00423d3710f60b2da146075a59e17bfa837f556ed2c8acafe05bc209dcaac3e9')
+ version('4.39', sha256='39c6ca7f86b0f0bebfeada687a5a8b99f66470a52b0f815195ae63c683266f24')
+ version('4.38', sha256='837fb552f54e46e54a13fa03c321073ba8373715346c4bc7e522b2c82a2c75c9')
+ version('4.37', sha256='2c4c9e7c3dbace3c75524324b6a686381be37bebab89b5001c0670418cec89c7')
+ version('4.36', sha256='fcbfbe22b044440fab3d6cbee11d18532b63accefe9cc30b2c41994cdeb08829')
+ version('4.35', sha256='97ef658b96fa10bac3e01097b1e2b6630fea2b307081ec6f2ac00f85e6020178')
+ version('4.34', sha256='36886e660290aa5afd06f735f587717f7f366b3535b7b0d3082b4e99ded9dc37')
+ version('4.33', sha256='9c01202190b7cdebe629e13abb70f050f421139f8115d1626321f442a9f54df8')
+ version('4.32', sha256='736eb96801881a61b9da72dced2d49574067443545892355af94411392526902')
+ version('4.31', sha256='0fd9342a216299ff24761e641714c7bd3e42c364f277eb3600d40085f4ace6c3')
+ version('4.30', sha256='1b38a52dd89cc6aee145d23bd74f586c73268938c6f346b20583ee0242d7d170')
+ version('4.29', sha256='3a17eaea8055962ff35bb9117f0860243d7977c34cbfcafc76e8e26309e339cf')
+ version('4.28', sha256='a094cb12e4e328e8b3b06bb313212f1826208c107fa6b48cf02f0ccdc32b562b')
+ version('4.27', sha256='ee32544fb04e7f4a2d223b442b306bd67cc900b7e9b5917f0b33d1979e6db34f')
+ version('4.26', sha256='213145815f00b6855b1ba71c20e78fd1d3c41595fae270308483cdba8d3fcec6')
+ version('4.25', sha256='ac2b0123badcb76c63eb716fcd95e0ee4021d345b5db05fda19253c59e39b384')
+ version('4.24', sha256='24d2ceaabbd886981d0df56f8f7e5f7f1a9db173778baa4965605f6880c90eb8')
+ version('4.23', sha256='57d6feee20a565f9de3302376a2531cfda50755088442102963b16e6f70b2e3b')
+ version('4.22', sha256='d8f611f384edbd240006d8c0f56135e74199ab88e9416cfc78cf5472f1ff337d')
+ version('4.21', sha256='96bc4bdccab27d935a44f1d9a8df94986d4b3361f5ff9382e86300ed5b9fdfa2')
+ version('4.20', sha256='72d3fb6dc653e6ae36bd47f2667fbff3c587c72f8bfb3f0dcb1763ee86c906f8')
+ version('4.18', sha256='52d7f6e10c44475052ac8b6828ed6f8b728a1c5d7e674b441eb0e930029ea4cd')
+ version('4.17', sha256='1d0cecdd3340dca695d6f833830e7a59f937d4bedbcff53109abe66e5a65d22c')
+ version('4.16', sha256='6a996700b877d268b48f91f91e356d2a4b20cb12207c05943d04504f6a0de0c7')
+ version('4.15', sha256='7b9dc43d6be79e4e542b8520715cb3ab7f9095afccc93bce9cacc271c665bf7d')
+ version('4.14', sha256='90c4f84ae9932d66825c61af9cd67b0b2877b477c967812a5d6953d67f3b003d')
+ version('4.13', sha256='afbc6964e593e7efed3fa5c91af45c4491cfdb994e7fdbe285cbb3719162cb90')
+ version('4.12', sha256='69f1beeafbe8f27bdac18e29ce97dd63cc1bdf847ff221ed0a6f0042047fa237')
+ version('4.11', sha256='d84ccfe85b14b3cb086f98d70dbf05671d6cb8498bd6f20f0041d6010dd320da')
+ version('4.10', sha256='de74e5e020a8a26cd357f5917afb48f7e14e161ca58574a1c653441bdbe9711c')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-serpent@1.27:', type=('build', 'run'))
+ depends_on('py-selectors34', when='^python@:3.3', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pyrsistent/package.py b/var/spack/repos/builtin/packages/py-pyrsistent/package.py
index 765760efe5..6732c14481 100644
--- a/var/spack/repos/builtin/packages/py-pyrsistent/package.py
+++ b/var/spack/repos/builtin/packages/py-pyrsistent/package.py
@@ -14,8 +14,11 @@ class PyPyrsistent(PythonPackage):
homepage = "https://github.com/tobgu/pyrsistent/"
pypi = "pyrsistent/pyrsistent-0.15.7.tar.gz"
+ version('0.18.0', sha256='773c781216f8c2900b42a7b638d5b517bb134ae1acbebe4d1e8f1f41ea60eb4b')
version('0.15.7', sha256='cdc7b5e3ed77bed61270a47d35434a30617b9becdf2478af76ad2c6ade307280')
depends_on('python@2.7:2.8,3.5:', type=('build', 'link', 'run'))
+ depends_on('python@3.6:', type=('build', 'link', 'run'), when='@0.18.0:')
depends_on('py-setuptools', type='build')
- depends_on('py-six', type=('build', 'run'))
+ depends_on('py-setuptools@42:', type='build', when='@0.18.0:')
+ depends_on('py-six', type=('build', 'run'), when='@:0.17')
diff --git a/var/spack/repos/builtin/packages/py-pyscipopt/package.py b/var/spack/repos/builtin/packages/py-pyscipopt/package.py
new file mode 100644
index 0000000000..1bfed30d38
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pyscipopt/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPyscipopt(PythonPackage):
+ """Python interface for the SCIP Optimization Suite"""
+
+ homepage = "https://github.com/scipopt/PySCIPOpt"
+ pypi = "PySCIPOpt/PySCIPOpt-3.4.0.tar.gz"
+
+ version('3.4.0', sha256='8da4db57b21010e0d5a863292dd455c88dd71ecec12a8439171c213a8092f88a')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-cython', type='build')
+ depends_on('py-wheel', type='build')
+ depends_on('scipoptsuite')
diff --git a/var/spack/repos/builtin/packages/py-pyspellchecker/package.py b/var/spack/repos/builtin/packages/py-pyspellchecker/package.py
new file mode 100644
index 0000000000..61a112fc86
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pyspellchecker/package.py
@@ -0,0 +1,18 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPyspellchecker(PythonPackage):
+ """Pure python spell checker based on work by Peter Norvig"""
+
+ homepage = "https://github.com/barrust/pyspellchecker"
+ pypi = "pyspellchecker/pyspellchecker-0.6.2.tar.gz"
+
+ version('0.6.2', sha256='af6a1d0393a175499475a873f31e52135f1efd5fc912c979101b795b3c2ee77f')
+
+ depends_on('python@3.0:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-pysqlite3/package.py b/var/spack/repos/builtin/packages/py-pysqlite3/package.py
new file mode 100644
index 0000000000..72841002af
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pysqlite3/package.py
@@ -0,0 +1,26 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPysqlite3(PythonPackage):
+ """DB-API 2.0 interface for Sqlite 3.x"""
+
+ homepage = "https://github.com/coleifer/pysqlite3"
+ pypi = "pysqlite3/pysqlite3-0.4.6.tar.gz"
+
+ version('0.4.6', sha256='7ec4d4c477fa96609c1517afbc33bf02747588e528e79c695de95907cea7bf30')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('sqlite', type=('build', 'link', 'run'))
+
+ def patch(self):
+ filter_file("^include_dirs *=.*",
+ "include_dirs = " + self.spec['sqlite'].headers.directories[0],
+ 'setup.cfg')
+ filter_file("^library_dirs *=.*",
+ "library_dirs = " + self.spec['sqlite'].libs.directories[0],
+ 'setup.cfg')
diff --git a/var/spack/repos/builtin/packages/py-pytest-cov/package.py b/var/spack/repos/builtin/packages/py-pytest-cov/package.py
index d0cd2f2fe3..b6bdbf8d8f 100644
--- a/var/spack/repos/builtin/packages/py-pytest-cov/package.py
+++ b/var/spack/repos/builtin/packages/py-pytest-cov/package.py
@@ -12,12 +12,16 @@ class PyPytestCov(PythonPackage):
homepage = "https://github.com/pytest-dev/pytest-cov"
pypi = "pytest-cov/pytest-cov-2.8.1.tar.gz"
+ version('3.0.0', sha256='e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470')
version('2.8.1', sha256='cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b')
version('2.3.1', sha256='fa0a212283cdf52e2eecc24dd6459bb7687cc29adb60cb84258fab73be8dda0f')
extends('python', ignore=r'bin/*')
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
+ depends_on('python@3.6:', type=('build', 'run'), when='@3.0.0:')
depends_on('py-setuptools', type='build')
depends_on('py-pytest@3.6:', type=('build', 'run'))
+ depends_on('py-pytest@4.6:', type=('build', 'run'), when='@3.0.0:')
depends_on('py-coverage@4.4:', type=('build', 'run'))
+ depends_on('py-coverage@5.2.1: +toml', type=('build', 'run'), when='@3.0.0:')
diff --git a/var/spack/repos/builtin/packages/py-pytest-runner/package.py b/var/spack/repos/builtin/packages/py-pytest-runner/package.py
index 1221464219..ade95a1810 100644
--- a/var/spack/repos/builtin/packages/py-pytest-runner/package.py
+++ b/var/spack/repos/builtin/packages/py-pytest-runner/package.py
@@ -12,8 +12,15 @@ class PyPytestRunner(PythonPackage):
homepage = "https://github.com/pytest-dev/pytest-runner"
pypi = "pytest-runner/pytest-runner-5.1.tar.gz"
+ version('5.3.1', sha256='0fce5b8dc68760f353979d99fdd6b3ad46330b6b1837e2077a89ebcf204aac91')
version('5.1', sha256='25a013c8d84f0ca60bb01bd11913a3bcab420f601f0f236de4423074af656e7a')
version('2.11.1', sha256='983a31eab45e375240e250161a556163bc8d250edaba97960909338c273a89b3')
+ # requirements from pyproject.toml are marked with *
+ depends_on('python@3.6:', when='@5.3:', type=('build', 'run'))
+ depends_on('py-setuptools@42:', when='@5.3:', type=('build', 'run')) # *
+ depends_on('py-setuptools@34.4:', when='@5:', type=('build', 'run')) # *
+ depends_on('py-setuptools@27.3:', when='@4.1:', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
+ depends_on('py-setuptools-scm+toml@3.4.1:', when='@5.3:', type='build') # *
depends_on('py-setuptools-scm@1.15:', type='build')
diff --git a/var/spack/repos/builtin/packages/py-pytest/package.py b/var/spack/repos/builtin/packages/py-pytest/package.py
index d736596c29..4a02e1a6fc 100644
--- a/var/spack/repos/builtin/packages/py-pytest/package.py
+++ b/var/spack/repos/builtin/packages/py-pytest/package.py
@@ -12,6 +12,7 @@ class PyPytest(PythonPackage):
homepage = "https://pytest.org/"
pypi = "pytest/pytest-5.2.1.tar.gz"
+ version('6.2.5', sha256='131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89')
version('6.2.4', sha256='50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b')
version('6.2.1', sha256='66e419b1899bc27346cb2c993e12c5e5e8daba9073c1fbce33b9807abc95c306')
version('6.1.1', sha256='8f593023c1a0f916110285b6efd7f99db07d59546e3d8c36fc60e2ab05d3be92')
@@ -40,7 +41,7 @@ class PyPytest(PythonPackage):
depends_on('py-setuptools@40.0:', when='@3.9.2:6.1', type=('build', 'run'))
depends_on('py-setuptools@30.3:', when='@3.9.0:3.9.1', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
- depends_on('py-setuptools-scm@3.4:', when='@6.2:', type='build')
+ depends_on('py-setuptools-scm@3.4: +toml', when='@6.2:', type='build')
depends_on('py-setuptools-scm', when='@3.1:', type='build')
# install_requires
@@ -49,7 +50,8 @@ class PyPytest(PythonPackage):
depends_on('py-attrs@17.2.0:', when='@3.3:3.4', type=('build', 'run'))
depends_on('py-iniconfig', when='@6.0:', type=('build', 'run'))
depends_on('py-packaging', when='@4.6:', type=('build', 'run'))
- depends_on('py-pluggy@0.12:0', when='@4.6:', type=('build', 'run'))
+ depends_on('py-pluggy@0.12:1', when='@6.2:', type=('build', 'run'))
+ depends_on('py-pluggy@0.12:0', when='@4.6:6.1', type=('build', 'run'))
depends_on('py-pluggy@0.9.0:0.9,0.11:0', when='@4.5.0:4.5', type=('build', 'run'))
depends_on('py-pluggy@0.11:', when='@4.4.2:4.4', type=('build', 'run'))
depends_on('py-pluggy@0.9:', when='@4.4.0:4.4.1', type=('build', 'run'))
@@ -62,9 +64,9 @@ class PyPytest(PythonPackage):
depends_on('py-py@1.4.33:1.4', when='@3.2.4', type=('build', 'run'))
depends_on('py-py@1.4.29:', when='@:3.1.1', type=('build', 'run'))
depends_on('py-toml', when='@6.0:', type=('build', 'run'))
- depends_on('py-atomicwrites@1.0:', when='@5.3: platform=win32', type=('build', 'run'))
+ depends_on('py-atomicwrites@1.0:', when='@5.3: platform=windows', type=('build', 'run'))
depends_on('py-atomicwrites@1.0:', when='@3.6:5.2', type=('build', 'run'))
- depends_on('py-colorama', when='platform=win32', type=('build', 'run'))
+ depends_on('py-colorama', when='platform=windows', type=('build', 'run'))
depends_on('py-importlib-metadata@0.12:', when='@4.6:5.0', type=('build', 'run'))
depends_on('py-importlib-metadata@0.12:', when='@5.1: ^python@:3.7', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-python-constraint/package.py b/var/spack/repos/builtin/packages/py-python-constraint/package.py
new file mode 100644
index 0000000000..1e79c487a0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-python-constraint/package.py
@@ -0,0 +1,15 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+class PyPythonConstraint(PythonPackage):
+ """Constraint Solving Problem resolver for Python"""
+
+ homepage = "https://github.com/python-constraint/python-constraint"
+ pypi = "python-constraint/python-constraint-1.4.0.tar.bz2"
+
+ version('1.4.0', sha256='501d6f17afe0032dfc6ea6c0f8acc12e44f992733f00e8538961031ef27ccb8e')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-python-daemon/package.py b/var/spack/repos/builtin/packages/py-python-daemon/package.py
index 8185918c75..32e5ff67ba 100644
--- a/var/spack/repos/builtin/packages/py-python-daemon/package.py
+++ b/var/spack/repos/builtin/packages/py-python-daemon/package.py
@@ -20,7 +20,11 @@ class PyPythonDaemon(PythonPackage):
"""
pypi = "python-daemon/python-daemon-2.0.5.tar.gz"
+ version('2.3.0', sha256='bda993f1623b1197699716d68d983bb580043cf2b8a66a01274d9b8297b0aeaf')
version('2.0.5', sha256='afde4fa433d94d007206ee31a0941d55b5eb232a5422b670aad628547b46bf68')
- depends_on("py-setuptools", type='build')
+ depends_on("py-setuptools", type=('build', 'run'))
depends_on("py-lockfile", type=('build', 'run'))
+ depends_on("py-lockfile@0.10:", type=('build', 'run'), when='@2.3.0:')
+ depends_on("py-docutils", type='build')
+ depends_on("py-twine", type='build')
diff --git a/var/spack/repos/builtin/packages/py-python-ldap/package.py b/var/spack/repos/builtin/packages/py-python-ldap/package.py
index ca12163460..7b07e7229e 100644
--- a/var/spack/repos/builtin/packages/py-python-ldap/package.py
+++ b/var/spack/repos/builtin/packages/py-python-ldap/package.py
@@ -14,12 +14,19 @@ class PyPythonLdap(PythonPackage):
homepage = "https://www.python-ldap.org/en/python-ldap-3.2.0/"
pypi = "python-ldap/python-ldap-3.2.0.tar.gz"
+ version('3.4.0', sha256='60464c8fc25e71e0fd40449a24eae482dcd0fb7fcf823e7de627a6525b3e0d12')
+ version('3.3.1', sha256='4711cacf013e298754abd70058ccc995758177fb425f1c2d30e71adfc1d00aa5')
version('3.2.0', sha256='7d1c4b15375a533564aad3d3deade789221e450052b21ebb9720fb822eccdb8e')
version('3.0.0', sha256='86746b912a2cd37a54b06c694f021b0c8556d4caeab75ef50435ada152e2fbe1')
- depends_on('openldap+client_only', type=('build', 'link', 'run'))
+ # See https://github.com/python-ldap/python-ldap/issues/432
+ depends_on('openldap+client_only @:2.4', type=('build', 'link', 'run'))
depends_on('py-setuptools', type='build')
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
depends_on('py-pyasn1@0.3.7:', type=('build', 'run'))
depends_on('py-pyasn1-modules@0.1.5:', type=('build', 'run'))
- depends_on('cyrus-sasl', type='link')
+ depends_on('cyrus-sasl', type='link', when='^openldap+sasl')
+
+ def patch(self):
+ if self.spec.satisfies('^openldap~sasl'):
+ filter_file('HAVE_SASL ', '', 'setup.cfg')
diff --git a/var/spack/repos/builtin/packages/py-python-rapidjson/package.py b/var/spack/repos/builtin/packages/py-python-rapidjson/package.py
index 0df7feb954..1ccae166a5 100644
--- a/var/spack/repos/builtin/packages/py-python-rapidjson/package.py
+++ b/var/spack/repos/builtin/packages/py-python-rapidjson/package.py
@@ -12,7 +12,9 @@ class PyPythonRapidjson(PythonPackage):
homepage = "https://github.com/python-rapidjson/python-rapidjson"
pypi = "python-rapidjson/python-rapidjson-0.9.1.tar.gz"
+ version('1.5', sha256='04323e63cf57f7ed927fd9bcb1861ef5ecb0d4d7213f2755969d4a1ac3c2de6f')
version('0.9.1', sha256='ad80bd7e4bb15d9705227630037a433e2e2a7982b54b51de2ebabdd1611394a1')
depends_on('python@3.4:', type=('build', 'run'))
+ depends_on('python@3.6:', type=('build', 'run'), when='@1.5:')
depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-pythonqwt/package.py b/var/spack/repos/builtin/packages/py-pythonqwt/package.py
index 8f7678a6e3..ce6e0b93d7 100644
--- a/var/spack/repos/builtin/packages/py-pythonqwt/package.py
+++ b/var/spack/repos/builtin/packages/py-pythonqwt/package.py
@@ -14,7 +14,7 @@ class PyPythonqwt(PythonPackage):
version('0.5.5', sha256='1f13cc8b555a57f8fe0f806d6c2f6d847050e4d837649503932b81316d12788a')
- variant('doc', default=False, description="Build documentation.")
+ variant('docs', default=False, description="Build documentation.")
depends_on('py-setuptools', type='build')
depends_on('py-numpy@1.3:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pythonsollya/package.py b/var/spack/repos/builtin/packages/py-pythonsollya/package.py
index 02decf6b86..7e6e1f30fe 100644
--- a/var/spack/repos/builtin/packages/py-pythonsollya/package.py
+++ b/var/spack/repos/builtin/packages/py-pythonsollya/package.py
@@ -23,3 +23,10 @@ class PyPythonsollya(PythonPackage):
depends_on('sollya', type=('build', 'link'))
depends_on('py-bigfloat', type=('build', 'run'))
depends_on('mpfi', type=('build', 'link'))
+
+ @run_before('build')
+ def patch(self):
+ filter_file('PYTHON ?= python2',
+ 'PYTHON ?= ' + self.spec['python'].command.path,
+ 'GNUmakefile',
+ string=True)
diff --git a/var/spack/repos/builtin/packages/py-pythran/package.py b/var/spack/repos/builtin/packages/py-pythran/package.py
index 431347d26b..b8bacda888 100644
--- a/var/spack/repos/builtin/packages/py-pythran/package.py
+++ b/var/spack/repos/builtin/packages/py-pythran/package.py
@@ -14,6 +14,7 @@ class PyPythran(PythonPackage):
homepage = "https://github.com/serge-sans-paille/pythran"
pypi = "pythran/pythran-0.9.11.tar.gz"
+ version('0.10.0', sha256='9dac8e1d50f33d4676003e350b1f0c878ce113e6f907920e92dc103352cac5bf')
version('0.9.12', sha256='5d50dc74dca1d3f902941865acbae981fc24cceeb9d54673d68d6b5c8c1b0001')
version('0.9.11', sha256='a317f91e2aade9f6550dc3bf40b5caeb45b7e012daf27e2b3e4ad928edb01667')
version('0.9.10', sha256='8fa1d19624cb2950e5a18974fdcb0dffc57e1a821049dc95df09563edd673915')
@@ -28,7 +29,7 @@ class PyPythran(PythonPackage):
depends_on('python@3:', when='@0.9.6:', type=('build', 'run'))
depends_on('python@2.7:', when='@:0.9.5', type=('build', 'run'))
depends_on('py-setuptools', type='build')
- depends_on('py-pytest-runner', type='build')
+ depends_on('py-pytest-runner', type='build', when='@:0.9')
depends_on('py-ply@3.4:', type=('build', 'run'))
depends_on('py-networkx@2:', when='@:0.9.11', type=('build', 'run'))
depends_on('py-decorator', when='@:0.9.11', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pytools/package.py b/var/spack/repos/builtin/packages/py-pytools/package.py
index 22a380fa34..803677dce9 100644
--- a/var/spack/repos/builtin/packages/py-pytools/package.py
+++ b/var/spack/repos/builtin/packages/py-pytools/package.py
@@ -11,12 +11,15 @@ class PyPytools(PythonPackage):
pypi = "pytools/pytools-2019.1.1.tar.gz"
+ version('2021.2.9', sha256='db6cf83c9ba0a165d545029e2301621486d1e9ef295684072e5cd75316a13755')
version('2019.1.1', sha256='ce2d702ae4ef10a70197b00b93141461140d00578f2a862fa946ca1446a300db')
version('2016.2.6', sha256='6dd49932b8f81a8b622685cff3dd515e351a9290aef0fd5d020e4df00c06aa95')
depends_on('py-setuptools', type='build')
- depends_on('py-decorator@3.2.0:', type=('build', 'run'))
+ depends_on('py-decorator@3.2.0:', type=('build', 'run'), when='@:2019.1.1')
depends_on('py-appdirs@1.4.0:', type=('build', 'run'))
- depends_on('py-six@1.8.0:', type=('build', 'run'))
+ depends_on('py-six@1.8.0:', type=('build', 'run'), when='@:2019.1.1')
depends_on('py-numpy@1.6.0:', type=('build', 'run'))
+ depends_on('py-dataclasses@0.7:', type=('build', 'run'), when='@2021.2.9: ^python@:3.6')
depends_on('python@2.6:2.8,3.4:', type=('build', 'run'))
+ depends_on('python@3.6:3', type=('build', 'run'), when='@2021.2.9:')
diff --git a/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py b/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py
index 6816c72668..001ab45d08 100644
--- a/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py
+++ b/var/spack/repos/builtin/packages/py-pytorch-lightning/package.py
@@ -12,6 +12,7 @@ class PyPytorchLightning(PythonPackage):
homepage = "https://github.com/PyTorchLightning/pytorch-lightning"
pypi = "pytorch-lightning/pytorch-lightning-1.2.10.tar.gz"
+ version('1.5.3', sha256='a206169a0c4356366a7edadb5ebd2f38e9a611ff78265ce93b767662682f5620')
version('1.4.1', sha256='1d1128aeb5d0e523d2204c4d9399d65c4e5f41ff0370e96d694a823af5e8e6f3')
version('1.4.0', sha256='6529cf064f9dc323c94f3ce84b56ee1a05db1b0ab17db77c4d15aa36e34da81f')
version('1.3.8', sha256='60b0a3e464d394864dae4c8d251afa7aa453644a19bb7672f5ee400343cdf7b0')
@@ -30,7 +31,9 @@ class PyPytorchLightning(PythonPackage):
depends_on('py-pyyaml@5.1:5.3,5.5:', when='@:1.2', type=('build', 'run'))
depends_on('py-fsspec@2021.05.0:2021.05,2021.06.1:+http', when='@1.3:', type=('build', 'run'))
depends_on('py-fsspec@0.8.1:+http', when='@:1.2', type=('build', 'run'))
- depends_on('py-tensorboard@2.2.0:2.4,2.5.1:', type=('build', 'run'))
+ depends_on('py-tensorboard@2.2.0:', when='@1.5:', type=('build', 'run'))
+ depends_on('py-tensorboard@2.2.0:2.4,2.5.1:', when='@:1.4', type=('build', 'run'))
+ depends_on('py-torchmetrics@0.4.1:', when='@1.5:', type=('build', 'run'))
depends_on('py-torchmetrics@0.4.0:', when='@1.4:', type=('build', 'run'))
depends_on('py-torchmetrics@0.2.0:', when='@1.3', type=('build', 'run'))
depends_on('py-torchmetrics@0.2.0', when='@:1.2', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pyyaml/package.py b/var/spack/repos/builtin/packages/py-pyyaml/package.py
index 1c5e7d02eb..b68af04549 100644
--- a/var/spack/repos/builtin/packages/py-pyyaml/package.py
+++ b/var/spack/repos/builtin/packages/py-pyyaml/package.py
@@ -15,6 +15,7 @@ class PyPyyaml(PythonPackage):
maintainers = ['adamjstewart']
+ version('6.0', sha256='68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2')
version('5.3.1', sha256='b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d')
version('5.1.2', sha256='01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4')
version('5.1', sha256='436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95')
@@ -25,7 +26,10 @@ class PyPyyaml(PythonPackage):
variant('libyaml', default=True, description='Use libYAML bindings')
depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
+ depends_on('python@3.6:', type=('build', 'run'), when='@6.0:')
depends_on('libyaml', when='+libyaml')
+ depends_on('py-setuptools', when='@6.0:')
+ depends_on('py-cython', when='@6.0: +libyaml')
phases = ['build_ext', 'install']
@@ -39,20 +43,21 @@ class PyPyyaml(PythonPackage):
return modules
def setup_py(self, *args, **kwargs):
- # Cast from tuple to list
- args = list(args)
+ if self.spec.satisfies('@:5'):
+ # Cast from tuple to list
+ args = list(args)
- if '+libyaml' in self.spec:
- args.insert(0, '--with-libyaml')
- else:
- args.insert(0, '--without-libyaml')
+ if '+libyaml' in self.spec:
+ args.insert(0, '--with-libyaml')
+ else:
+ args.insert(0, '--without-libyaml')
super(PyPyyaml, self).setup_py(*args, **kwargs)
def build_ext_args(self, spec, prefix):
args = []
- if '+libyaml' in spec:
+ if spec.satisfies('@:5 +libyaml'):
args.extend([
spec['libyaml'].libs.search_flags,
spec['libyaml'].headers.include_flags,
diff --git a/var/spack/repos/builtin/packages/py-pyzmq/package.py b/var/spack/repos/builtin/packages/py-pyzmq/package.py
index 9230b285e3..74d894cd8a 100644
--- a/var/spack/repos/builtin/packages/py-pyzmq/package.py
+++ b/var/spack/repos/builtin/packages/py-pyzmq/package.py
@@ -9,7 +9,7 @@ from spack import *
class PyPyzmq(PythonPackage):
"""PyZMQ: Python bindings for zeromq."""
homepage = "https://github.com/zeromq/pyzmq"
- url = "https://github.com/zeromq/pyzmq/archive/v14.7.0.tar.gz"
+ pypi = "pyzmq/pyzmq-22.3.0.tar.gz"
import_modules = [
'zmq', 'zmq.green', 'zmq.green.eventloop', 'zmq.sugar', 'zmq.auth',
@@ -19,19 +19,45 @@ class PyPyzmq(PythonPackage):
'zmq.log', 'zmq.asyncio', 'zmq.devices'
]
- version('18.1.0', sha256='32f7618b8104021bc96cbd60be4330bdf37b929e8061dbce362c9f3478a08e21')
- version('18.0.1', sha256='7b0107992d8cc4c43d9af1c2e13d573ea761c7feb23d7e0e7da9dc963811e68f')
- version('17.1.2', sha256='77a32350440e321466b1748e6063b34a8a73768b62cb674e7d799fbc654b7c45')
- version('16.0.2', sha256='717dd902c3cf432b1c68e7b299ad028b0de0d0a823858e440b81d5f1baa2b1c1')
- version('14.7.0', sha256='809a5fcc720d286c840f7f64696e60322b5b2544795a73db626f09b344d16a15')
+ version('22.3.0', sha256='8eddc033e716f8c91c6a2112f0a8ebc5e00532b4a6ae1eb0ccc48e027f9c671c')
+ version('18.1.0', sha256='93f44739db69234c013a16990e43db1aa0af3cf5a4b8b377d028ff24515fbeb3')
+ version('18.0.1', sha256='8b319805f6f7c907b101c864c3ca6cefc9db8ce0791356f180b1b644c7347e4c')
+ version('17.1.2', sha256='a72b82ac1910f2cf61a49139f4974f994984475f771b0faa730839607eeedddf')
+ version('16.0.2', sha256='0322543fff5ab6f87d11a8a099c4c07dd8a1719040084b6ce9162bcdf5c45c9d')
+ version('14.7.0', sha256='77994f80360488e7153e64e5959dc5471531d1648e3a4bff14a714d074a38cc2')
- depends_on('python@2.7:2.8,3.3:', type=('build', 'run'), when='@18:')
+ # Python 3.9 build issues
+ depends_on('python@3.6:3.8', type=('build', 'run'), when='@22:')
+ depends_on('python@2.7,3.3:3.8', type=('build', 'run'), when='@16:18')
+ depends_on('python@2.6:2.7,3.2:3.8', type=('build', 'run'), when='@:14')
depends_on('py-cython@0.16:', type='build')
depends_on('py-cython@0.20:', type='build', when='@18:')
- depends_on('py-py', type=('build', 'run'))
- depends_on('py-cffi', type=('build', 'run'))
+ depends_on('py-cython@0.29:', type='build', when='@22.3.0:')
depends_on('py-gevent', type=('build', 'run'))
- depends_on('libzmq')
+ depends_on('libzmq', type=('build', 'run'))
+ depends_on('libzmq@3.2:', type=('build', 'run'), when='@22.3.0:')
+ depends_on('py-setuptools', type='build', when='@22.3.0:')
+ # Only when python is provided by 'pypy'
+ depends_on('py-py', type=('build', 'run'), when='@:22')
+ depends_on('py-cffi', type=('build', 'run'), when='@:22')
+
+ @run_before('install')
+ def setup(self):
+ """Create config file listing dependency information."""
+
+ with open('setup.cfg', 'w') as config:
+ config.write("""\
+[global]
+zmq_prefix = {0}
+
+[build_ext]
+library_dirs = {1}
+include_dirs = {2}
+""".format(
+ self.spec['libzmq'].prefix,
+ self.spec['libzmq'].libs.directories[0],
+ self.spec['libzmq'].headers.directories[0],
+ ))
def setup_build_environment(self, env):
# Needed for `spack install --test=root py-pyzmq`
diff --git a/var/spack/repos/builtin/packages/py-qiskit-aer/package.py b/var/spack/repos/builtin/packages/py-qiskit-aer/package.py
new file mode 100644
index 0000000000..9f4b6af754
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-qiskit-aer/package.py
@@ -0,0 +1,56 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+class PyQiskitAer(PythonPackage, CudaPackage):
+ """Aer is a high performance simulator for quantum circuits that
+ includes noise models"""
+
+ homepage = "https://github.com/Qiskit/qiskit-aer"
+ pypi = "qiskit-aer/qiskit-aer-0.9.1.tar.gz"
+
+ version('0.9.1', sha256='3bf5f615aaae7cc5f816c39a4e9108aabaed0cc894fb6f841e48ffd56574e7eb')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools@40.1.0:', type='build')
+ depends_on('py-numpy@1.16.3:', type=('build', 'run'))
+ depends_on('py-pybind11@2.6:', type='build')
+ depends_on('py-qiskit-terra@0.17.0:', type=('build', 'run'))
+ depends_on('py-scipy@1.0:', type=('build', 'run'))
+ depends_on('py-scikit-build@0.11.0:', type='build')
+ depends_on('py-cmake@:3.16,3.18:', type='build')
+ depends_on('mpi', when='+mpi')
+ depends_on('nlohmann-json@3.1.1:')
+ depends_on('spdlog@1.5.0:')
+ depends_on('muparserx@4.0.8:')
+ depends_on('blas')
+ depends_on('cuda@10.1:', when='+cuda')
+
+ variant('mpi', default=True, description='Enable MPI support')
+ variant('gdr', default=True, description='Enable GDR support')
+
+ def setup_build_environment(self, env):
+ env.set('DISABLE_CONAN', 'ON')
+ env.set('DISABLE_DEPENDENCY_INSTALL', '1')
+ env.set("CUDAHOSTCXX", spack_cxx)
+
+ def build_args(self, spec, prefix):
+ args = []
+ args.append('-DDISABLE_CONAN=ON')
+ if '~gdr' in self.spec:
+ args.append('-DAER_DISABLE_GDR=True')
+ else:
+ args.append('-DAER_DISABLE_GDR=False')
+ if '+mpi' in self.spec:
+ args.append('-DAER_MPI=True')
+ else:
+ args.append('-DAER_MPI=False')
+ if '+cuda' in self.spec:
+ args.append('-DAER_THRUST_BACKEND=CUDA')
+ cuda_archs = spec.variants['cuda_arch'].value
+ if 'none' not in cuda_archs:
+ args.append('-DCUDA_NVCC_FLAGS={0}'.
+ format(' '.join(self.cuda_flags(cuda_archs))))
+ return args
diff --git a/var/spack/repos/builtin/packages/py-qiskit-terra/package.py b/var/spack/repos/builtin/packages/py-qiskit-terra/package.py
new file mode 100644
index 0000000000..fbbef84f37
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-qiskit-terra/package.py
@@ -0,0 +1,34 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+class PyQiskitTerra(PythonPackage):
+ """Qiskit is an open-source SDK for working with quantum computers
+ at the level of extended quantum circuits, operators, and
+ algorithms."""
+
+ homepage = "https://github.com/Qiskit/qiskit-terra"
+ pypi = "qiskit-terra/qiskit-terra-0.18.3.tar.gz"
+
+ version('0.18.3', sha256='8737c8f1f4c6f29ec2fb02d73023f4854a396c33f78f4629a861a3e48fc789cc')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-cython@0.27.1:', type='build')
+
+ depends_on('py-contextvars@2.4:', when='^python@:3.6', type=('build', 'run'))
+ depends_on('py-jsonschema@2.6:', type=('build', 'run'))
+ depends_on('py-retworkx@0.9.0:', type=('build', 'run'))
+ depends_on('py-numpy@1.17:', type=('build', 'run'))
+ depends_on('py-scipy@1.4:', type=('build', 'run'))
+ depends_on('py-ply@3.10:', type=('build', 'run'))
+ depends_on('py-psutil@5:', type=('build', 'run'))
+ depends_on('py-sympy@1.3:', type=('build', 'run'))
+ depends_on('py-dill@0.3:', type=('build', 'run'))
+ depends_on('py-fastjsonschema@2.10:', type=('build', 'run'))
+ depends_on('py-python-constraint@1.4:', type=('build', 'run'))
+ depends_on('py-python-dateutil@2.8.0:', type=('build', 'run'))
+ depends_on('py-symengine@0.7:', type=('build', 'run'))
+ depends_on('py-tweedledum@1.1:1', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-qpth/package.py b/var/spack/repos/builtin/packages/py-qpth/package.py
new file mode 100644
index 0000000000..a8c54f91e5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-qpth/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyQpth(PythonPackage):
+ """A fast and differentiable QP solver for PyTorch"""
+
+ homepage = "https://github.com/locuslab/qpth"
+ pypi = "qpth/qpth-0.0.15.tar.gz"
+
+ version('0.0.15', sha256='99d8ec5a35877c18543875a7d5b7fc9af1fa9a4d4b0888011c1ecf42ad9d521c')
+
+ depends_on('python', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy@1')
+ depends_on('py-torch')
+ depends_on('py-cvxpy')
diff --git a/var/spack/repos/builtin/packages/py-qtconsole/package.py b/var/spack/repos/builtin/packages/py-qtconsole/package.py
index 8ff12768b1..11b5295e67 100644
--- a/var/spack/repos/builtin/packages/py-qtconsole/package.py
+++ b/var/spack/repos/builtin/packages/py-qtconsole/package.py
@@ -12,12 +12,14 @@ class PyQtconsole(PythonPackage):
homepage = "https://ipython.org"
pypi = "qtconsole/qtconsole-4.2.1.tar.gz"
+ version('5.2.0', sha256='6bb4df839609f240194213407872076f871e3a3884cf8e785068e8c7f39344c6')
version('4.5.1', sha256='4af84facdd6f00a6b9b2927255f717bb23ae4b7a20ba1d9ef0a5a5a8dbe01ae2')
version('4.2.1', sha256='25ec7d345528b3e8f3c91be349dd3c699755f206dc4b6ec668e2e5dd60ea18ef')
variant('docs', default=False, description='Build documentation')
depends_on('python@2.7:2.8,3.3:', type=('build', 'run'))
+ depends_on('python@3.6:', type=('build', 'run'), when='@5.2.0:')
depends_on('py-ipykernel@4.1:', type=('build', 'run'))
depends_on('py-jupyter-client@4.1:', type=('build', 'run'))
depends_on('py-jupyter-core', type=('build', 'run'))
@@ -25,4 +27,6 @@ class PyQtconsole(PythonPackage):
depends_on('py-traitlets', type=('build', 'run'))
depends_on('py-ipython-genutils', type=('build', 'run'), when='@4.5.1:')
depends_on('py-sphinx@1.3:', type=('build', 'run'), when='+docs')
- depends_on('py-pyqt5', type='run')
+ depends_on('py-pyqt5', type='run', when='@:5.1')
+ depends_on('py-qtpy', type=('build', 'run'), when='@5.2.0:')
+ depends_on('py-pyzmq@17.1:', type=('build', 'run'), when='@5.2.0:')
diff --git a/var/spack/repos/builtin/packages/py-radical-entk/package.py b/var/spack/repos/builtin/packages/py-radical-entk/package.py
index 03d3592a23..96a01fb5f2 100755..100644
--- a/var/spack/repos/builtin/packages/py-radical-entk/package.py
+++ b/var/spack/repos/builtin/packages/py-radical-entk/package.py
@@ -12,11 +12,12 @@ class PyRadicalEntk(PythonPackage):
homepage = 'https://radical-cybertools.github.io'
git = 'https://github.com/radical-cybertools/radical.entk.git'
- pypi = 'radical.entk/radical.entk-1.8.0.tar.gz'
+ pypi = 'radical.entk/radical.entk-1.9.0.tar.gz'
maintainers = ['andre-merzky']
version('develop', branch='devel')
+ version('1.9.0', sha256='918c716ac5eecb012a57452f45f5a064af7ea72f70765c7b0c60be4322b23557')
version('1.8.0', sha256='47a3f7f1409612d015a3e6633853d31ec4e4b0681aecb7554be16ebf39c7f756')
version('1.6.7', sha256='9384568279d29b9619a565c075f287a08bca8365e2af55e520af0c2f3595f8a2')
diff --git a/var/spack/repos/builtin/packages/py-radical-gtod/package.py b/var/spack/repos/builtin/packages/py-radical-gtod/package.py
index 4e8eda7ea2..4e8eda7ea2 100755..100644
--- a/var/spack/repos/builtin/packages/py-radical-gtod/package.py
+++ b/var/spack/repos/builtin/packages/py-radical-gtod/package.py
diff --git a/var/spack/repos/builtin/packages/py-radical-pilot/package.py b/var/spack/repos/builtin/packages/py-radical-pilot/package.py
index aeca8fb11f..9ced1d9a85 100755..100644
--- a/var/spack/repos/builtin/packages/py-radical-pilot/package.py
+++ b/var/spack/repos/builtin/packages/py-radical-pilot/package.py
@@ -13,11 +13,13 @@ class PyRadicalPilot(PythonPackage):
homepage = 'https://radical-cybertools.github.io'
git = 'https://github.com/radical-cybertools/radical.pilot.git'
- pypi = 'radical.pilot/radical.pilot-1.8.0.tar.gz'
+ pypi = 'radical.pilot/radical.pilot-1.10.1.tar.gz'
maintainers = ['andre-merzky']
version('develop', branch='devel')
+ version('1.10.1', sha256='003f4c519b991bded31693026b69dd51547a5a69a5f94355dc8beff766524b3c')
+ version('1.9.2', sha256='7c872ac9103a2aed0c5cd46057048a182f672191e194e0fd42794b0012e6e947')
version('1.8.0', sha256='a4c3bca163db61206e15a2d820d9a64e888da5c72672448ae975c26768130b9d')
version('1.6.8', sha256='fa8fd3f348a68b54ee8338d5c5cf1a3d99c10c0b6da804424a839239ee0d313d')
version('1.6.7', sha256='6ca0a3bd3cda65034fa756f37fa05681d5a43441c1605408a58364f89c627970')
@@ -26,6 +28,6 @@ class PyRadicalPilot(PythonPackage):
depends_on('py-radical-saga@1.6.6:', type=('build', 'run'))
depends_on('python@3.6:', type=('build', 'run'))
- depends_on('py-pymongo', type=('build', 'run'))
+ depends_on('py-pymongo@:3', type=('build', 'run'))
depends_on('py-setproctitle', type=('build', 'run'))
depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-radical-saga/package.py b/var/spack/repos/builtin/packages/py-radical-saga/package.py
index 81c30e2181..81c30e2181 100755..100644
--- a/var/spack/repos/builtin/packages/py-radical-saga/package.py
+++ b/var/spack/repos/builtin/packages/py-radical-saga/package.py
diff --git a/var/spack/repos/builtin/packages/py-radical-utils/package.py b/var/spack/repos/builtin/packages/py-radical-utils/package.py
index 6009e01c28..e285f2f559 100755..100644
--- a/var/spack/repos/builtin/packages/py-radical-utils/package.py
+++ b/var/spack/repos/builtin/packages/py-radical-utils/package.py
@@ -12,11 +12,13 @@ class PyRadicalUtils(PythonPackage):
homepage = 'https://radical-cybertools.github.io'
git = 'https://github.com/radical-cybertools/radical.utils.git'
- pypi = 'radical.utils/radical.utils-1.8.0.tar.gz'
+ pypi = 'radical.utils/radical.utils-1.9.1.tar.gz'
maintainers = ['andre-merzky']
version('develop', branch='devel')
+ version('1.9.1', sha256='0837d75e7f9dcce5ba5ac63151ab1683d6ba9ab3954b076d1f170cc4a3cdb1b4')
+ version('1.8.4', sha256='4777ba20e9f881bf3e73ad917638fdeca5a4b253d57ed7b321a07f670e3f737b')
version('1.8.0', sha256='8582c65593f51d394fc263c6354ec5ad9cc7173369dcedfb2eef4f5e8146cf03')
version('1.6.7', sha256='552f6c282f960ccd9d2401d686b0b3bfab35dfa94a26baeb2d3b4e45211f05a9')
@@ -30,7 +32,7 @@ class PyRadicalUtils(PythonPackage):
depends_on('py-msgpack', type=('build', 'run'))
depends_on('py-netifaces', type=('build', 'run'))
depends_on('py-ntplib', type=('build', 'run'))
- depends_on('py-pymongo', type=('build', 'run'))
+ depends_on('py-pymongo@:3', type=('build', 'run'))
depends_on('py-pyzmq', type=('build', 'run'))
depends_on('py-regex', type=('build', 'run'))
depends_on('py-setproctitle', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-rapidfuzz/package.py b/var/spack/repos/builtin/packages/py-rapidfuzz/package.py
new file mode 100644
index 0000000000..3d8e07c209
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-rapidfuzz/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyRapidfuzz(PythonPackage):
+ """Rapid fuzzy string matching in Python and C++ using the Levenshtein Distance."""
+
+ homepage = "https://github.com/maxbachmann/rapidfuzz"
+ pypi = "rapidfuzz/rapidfuzz-1.8.2.tar.gz"
+
+ version('1.8.2', sha256='d6efbb2b6b18b3a67d7bdfbcd9bb72732f55736852bbef823bdf210f9e0c6c90')
+
+ depends_on('python@2.7:', type=('build', 'link', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-rdt/package.py b/var/spack/repos/builtin/packages/py-rdt/package.py
new file mode 100644
index 0000000000..0f634a8b77
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-rdt/package.py
@@ -0,0 +1,25 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyRdt(PythonPackage):
+ """RDT is a Python library used to transform data for data
+ science libraries and preserve the transformations in order
+ to revert them as needed."""
+
+ homepage = "https://github.com/sdv-dev/RDT"
+ pypi = "rdt/rdt-0.6.1.tar.gz"
+
+ version('0.6.1', sha256='ee2ac0d3479b254f99f35a709a24ffd5f2c899de6ea71f1ee844c6113febba71')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy@1.18:1.19', type=('build', 'run'), when='^python@3.6')
+ depends_on('py-numpy@1.20:1', type=('build', 'run'), when='^python@3.7:')
+ depends_on('py-pandas@1.1.3:1.1.4', type=('build', 'run'))
+ depends_on('py-scipy@1.5.4:1', type=('build', 'run'))
+ depends_on('py-psutil@5.7:5', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-requests-oauthlib/package.py b/var/spack/repos/builtin/packages/py-requests-oauthlib/package.py
index 09d8de87e5..04c2ca4606 100644
--- a/var/spack/repos/builtin/packages/py-requests-oauthlib/package.py
+++ b/var/spack/repos/builtin/packages/py-requests-oauthlib/package.py
@@ -13,6 +13,7 @@ class PyRequestsOauthlib(PythonPackage):
homepage = "https://github.com/requests/requests-oauthlib"
pypi = "requests-oauthlib/requests-oauthlib-1.2.0.tar.gz"
+ version('1.3.0', sha256='b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a')
version('1.2.0', sha256='bd6533330e8748e94bf0b214775fed487d309b8b8fe823dc45641ebcd9a32f57')
version('0.3.3', sha256='37557b4de3eef50d2a4c65dc9382148b8331f04b1c637c414b3355feb0f007e9')
diff --git a/var/spack/repos/builtin/packages/py-requests/package.py b/var/spack/repos/builtin/packages/py-requests/package.py
index 8d1f0d3317..374926c3f9 100644
--- a/var/spack/repos/builtin/packages/py-requests/package.py
+++ b/var/spack/repos/builtin/packages/py-requests/package.py
@@ -12,6 +12,7 @@ class PyRequests(PythonPackage):
homepage = "http://python-requests.org"
pypi = "requests/requests-2.24.0.tar.gz"
+ version('2.26.0', sha256='b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7')
version('2.25.1', sha256='27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804')
version('2.24.0', sha256='b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b')
version('2.23.0', sha256='b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6')
@@ -25,12 +26,16 @@ class PyRequests(PythonPackage):
variant('socks', default=False, description='SOCKS and HTTP proxy support')
depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
+ depends_on('python@2.7:2.8,3.6:', type=('build', 'run'), when='@2.26.0:')
depends_on('py-setuptools', type='build')
- depends_on('py-chardet@3.0.2:4', type=('build', 'run'), when='@2.25.1:')
+ depends_on('py-chardet@3.0.2:4', type=('build', 'run'), when='@2.25.1: ^python@2:2')
depends_on('py-chardet@3.0.2:3', type=('build', 'run'), when='@2.23.0:2.25.0')
depends_on('py-chardet@3.0.2:3.0', type=('build', 'run'), when='@2.16.0:2.22')
- depends_on('py-idna@2.5:2', type=('build', 'run'), when='@2.23.0:')
+ depends_on('py-charset-normalizer@2.0.0:2.0', type=('build', 'run'), when='@2.26.0: ^python@3:')
+ depends_on('py-idna@2.5:3', type=('build', 'run'), when='@2.26.0: ^python@3:')
+ depends_on('py-idna@2.5:2', type=('build', 'run'), when='@2.26.0: ^python@:2')
+ depends_on('py-idna@2.5:2', type=('build', 'run'), when='@2.23:2.25')
depends_on('py-idna@2.5:2.8', type=('build', 'run'), when='@2.16.0:2.22')
depends_on('py-urllib3@1.21.1:1.26', type=('build', 'run'), when='@2.25.0:')
depends_on('py-urllib3@1.21.1:1.24,1.25.2:1.25', type=('build', 'run'), when='@2.16.0:2.24')
diff --git a/var/spack/repos/builtin/packages/py-retry/package.py b/var/spack/repos/builtin/packages/py-retry/package.py
new file mode 100644
index 0000000000..8e0417f02b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-retry/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyRetry(PythonPackage):
+ """Easy to use retry decorator."""
+
+ homepage = "https://github.com/invl/retry"
+ pypi = "retry/retry-0.9.2.tar.gz"
+
+ version('0.9.2', sha256='f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4')
+
+ depends_on('python@2.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-pbr', type='build')
+ depends_on('py-decorator@3.4.2:', type=('build', 'run'))
+ depends_on('py-py@1.4.26:1', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-retworkx/package.py b/var/spack/repos/builtin/packages/py-retworkx/package.py
new file mode 100644
index 0000000000..512c67fbb4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-retworkx/package.py
@@ -0,0 +1,18 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+class PyRetworkx(PythonPackage):
+ """A high performance Python graph library implemented in Rust."""
+
+ homepage = "https://github.com/Qiskit/retworkx"
+ pypi = "retworkx/retworkx-0.5.0.tar.gz"
+
+ version('0.10.2', sha256='ba81cb527de7ff338575905bb6fcbebdf2ab18ae800169a77ab863f855bf0951')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools-rust', type='build')
+ depends_on('py-numpy@1.16.0:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-rich/package.py b/var/spack/repos/builtin/packages/py-rich/package.py
index ffb6806f36..68be1f4ce8 100644
--- a/var/spack/repos/builtin/packages/py-rich/package.py
+++ b/var/spack/repos/builtin/packages/py-rich/package.py
@@ -14,6 +14,8 @@ class PyRich(PythonPackage):
homepage = "https://github.com/willmcgugan/rich"
pypi = "rich/rich-9.4.0.tar.gz"
+ version('10.14.0', sha256='8bfe4546d56b4131298d3a9e571a0742de342f1593770bd0d4707299f772a0af')
+ version('10.9.0', sha256='ba285f1c519519490034284e6a9d2e6e3f16dc7690f2de3d9140737d81304d22')
version('10.0.0', sha256='4674bd3056a72bb282ad581e3f8092dc110cdcc456b5ba76e34965cb85a69724')
version('9.9.0', sha256='0bd8f42c3a03b7ef5e311d5e37f47bea9d268f541981c169072be5869c007957')
version('9.8.2', sha256='c0d5903b463f015b254d6f52da82af3821d266fe516ae05fdc266e6abba5c3a8')
@@ -28,8 +30,10 @@ class PyRich(PythonPackage):
version('9.4.0', sha256='bde23a1761373fed2802502ff98292c5d735a5389ed96f4fe1be5fb4c2cde8ea')
depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('python@3.6.2:3', type=('build', 'run'), when='@10.14.0:')
depends_on('py-setuptools', type='build')
- depends_on('py-typing-extensions@3.7.4:3', type=('build', 'run'))
+ depends_on('py-typing-extensions@3.7.4:4', type=('build', 'run'), when='@10.14.0: ^python@:3.7')
+ depends_on('py-typing-extensions@3.7.4:3', type=('build', 'run'), when='@:10.13.0')
depends_on('py-dataclasses@0.7:0.8', when='^python@:3.6', type=('build', 'run'))
depends_on('py-pygments@2.6:2', type=('build', 'run'))
depends_on('py-commonmark@0.9.0:0.9', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-rsa/package.py b/var/spack/repos/builtin/packages/py-rsa/package.py
index 45fbc3fa9e..0395f653ae 100644
--- a/var/spack/repos/builtin/packages/py-rsa/package.py
+++ b/var/spack/repos/builtin/packages/py-rsa/package.py
@@ -12,8 +12,10 @@ class PyRsa(PythonPackage):
homepage = "https://stuvel.eu/rsa"
pypi = "rsa/rsa-3.4.2.tar.gz"
+ version('4.7.2', sha256='9d689e6ca1b3038bc82bf8d23e944b6b6037bc02301a574935b2dd946e0353b9')
version('4.0', sha256='1a836406405730121ae9823e19c6e806c62bbad73f890574fff50efa4122c487')
version('3.4.2', sha256='25df4e10c263fb88b5ace923dd84bf9aa7f5019687b5e55382ffcdb8bede9db5')
depends_on('py-setuptools', type=('build', 'run'))
depends_on('py-pyasn1@0.1.3:', type=('build', 'run'))
+ depends_on('python@3.5:3', type=('build', 'run'), when='@4.7.2:')
diff --git a/var/spack/repos/builtin/packages/py-ruamel-yaml-clib/package.py b/var/spack/repos/builtin/packages/py-ruamel-yaml-clib/package.py
index bf4d6ef3f6..436940225c 100644
--- a/var/spack/repos/builtin/packages/py-ruamel-yaml-clib/package.py
+++ b/var/spack/repos/builtin/packages/py-ruamel-yaml-clib/package.py
@@ -13,5 +13,5 @@ class PyRuamelYamlClib(PythonPackage):
version('0.2.0', sha256='b66832ea8077d9b3f6e311c4a53d06273db5dc2db6e8a908550f3c14d67e718c')
- depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
+ depends_on('python@2.7:2.8,3.5:', type=('build', 'link', 'run'))
depends_on('py-setuptools@28.7.0:', type='build')
diff --git a/var/spack/repos/builtin/packages/py-schema/package.py b/var/spack/repos/builtin/packages/py-schema/package.py
new file mode 100644
index 0000000000..87c3ec5f65
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-schema/package.py
@@ -0,0 +1,18 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PySchema(PythonPackage):
+ """Simple data validation library"""
+
+ homepage = "https://github.com/keleshev/schema"
+ pypi = "schema/schema-0.7.5.tar.gz"
+
+ version('0.7.5', sha256='f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-contextlib2@0.5.5:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-scikit-build/package.py b/var/spack/repos/builtin/packages/py-scikit-build/package.py
index f6fab07c95..b8d49f7b95 100644
--- a/var/spack/repos/builtin/packages/py-scikit-build/package.py
+++ b/var/spack/repos/builtin/packages/py-scikit-build/package.py
@@ -20,8 +20,10 @@ class PyScikitBuild(PythonPackage):
maintainers = ['coreyjadams']
+ version('0.12.0', sha256='c32a415d2e7920a4a966b037403c93b02c8a958d8badf3c60abd4b4493f7d988')
version('0.10.0', sha256='2beec252813b20327072c15e9d997f15972aedcc6a130d0154979ff0fdb1b010')
depends_on('py-setuptools@28.0.0:', type=('build', 'run'))
depends_on('py-packaging', type=('build', 'run'))
depends_on('py-wheel@0.29.0:', type=('build', 'run'))
+ depends_on('py-distro', type=('build', 'run'), when='@0.11:')
diff --git a/var/spack/repos/builtin/packages/py-scikit-fuzzy/package.py b/var/spack/repos/builtin/packages/py-scikit-fuzzy/package.py
new file mode 100644
index 0000000000..2c1ffe81e1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-scikit-fuzzy/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyScikitFuzzy(PythonPackage):
+ """Fuzzy logic toolkit for SciPy"""
+
+ homepage = "https://github.com/scikit-fuzzy/scikit-fuzzy"
+ pypi = "scikit-fuzzy/scikit-fuzzy-0.4.2.tar.gz"
+
+ version('0.4.2', sha256='1ab12424d847ede1bc79670d8058167be7c8dd660b00756e9b844817ceb1e12e')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type=('build',))
+ depends_on('py-networkx@1.9:', type=('build', 'run'))
+ depends_on('py-numpy@1.6:', type=('build', 'run'))
+ depends_on('py-scipy@0.9:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-scikit-learn-extra/package.py b/var/spack/repos/builtin/packages/py-scikit-learn-extra/package.py
new file mode 100644
index 0000000000..03f06e7865
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-scikit-learn-extra/package.py
@@ -0,0 +1,27 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyScikitLearnExtra(PythonPackage):
+ """A set of useful tools compatible with scikit-learn
+
+ scikit-learn-extra is a Python module for machine learning that extends
+ scikit-learn. It includes algorithms that are useful but do not satisfy the
+ scikit-learn inclusion criteria, for instance due to their novelty or lower
+ citation number."""
+
+ homepage = "https://github.com/scikit-learn-contrib/scikit-learn-extra"
+ pypi = "scikit-learn-extra/scikit-learn-extra-0.2.0.tar.gz"
+
+ version('0.2.0', sha256='3b1bb5fedde47920eb4b3fa0a0c18f80cc7359d9d0496720178788c6153b8019')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-cython@0.28.5:', type='build')
+ depends_on('py-numpy@1.13.3:', type=('build', 'run'))
+ depends_on('py-scipy@0.19.1:', type=('build', 'run'))
+ depends_on('py-scikit-learn@0.23:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-scikit-optimize/package.py b/var/spack/repos/builtin/packages/py-scikit-optimize/package.py
index b3d62fc1d3..fdf8db4756 100644
--- a/var/spack/repos/builtin/packages/py-scikit-optimize/package.py
+++ b/var/spack/repos/builtin/packages/py-scikit-optimize/package.py
@@ -16,15 +16,24 @@ class PyScikitOptimize(PythonPackage):
homepage = "https://scikit-optimize.github.io"
pypi = "scikit-optimize/scikit-optimize-0.5.2.tar.gz"
+ git = "https://github.com/scikit-optimize/scikit-optimize.git"
+ maintainers = ['liuyangzhuan']
+
+ version('master', branch='master')
version('0.5.2', sha256='1d7657a4b8ef9aa6d81e49b369c677c584e83269f11710557741d3b3f8fa0a75')
variant('plots', default=True,
description='Build with plot support from py-matplotlib')
+ variant('gptune', default=False,
+ description='Build with patches for GPTune')
depends_on('py-setuptools', type='build')
depends_on('py-numpy', type=('build', 'run'))
depends_on('py-scipy@0.14.0:', type=('build', 'run'))
depends_on('py-scikit-learn@0.19.1:', type=('build', 'run'))
+ depends_on('py-pyyaml', when='+gptune', type=('build', 'run'))
depends_on('py-matplotlib', when='+plots')
+
+ patch('space.patch', when='+gptune')
diff --git a/var/spack/repos/builtin/packages/py-scikit-optimize/space.patch b/var/spack/repos/builtin/packages/py-scikit-optimize/space.patch
new file mode 100644
index 0000000000..c608bbb78a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-scikit-optimize/space.patch
@@ -0,0 +1,57 @@
+diff --git a/skopt/space/space.py b/skopt/space/space.py
+old mode 100644
+new mode 100755
+index 329b0c1..bed1e05
+--- a/skopt/space/space.py
++++ b/skopt/space/space.py
+@@ -244,13 +244,13 @@ class Real(Dimension):
+ name : str or None
+ Name associated with the dimension, e.g., "learning rate".
+
+- dtype : str or dtype, default=float
++ dtype : str or dtype, default=np.float
+ float type which will be used in inverse_transform,
+ can be float.
+
+ """
+ def __init__(self, low, high, prior="uniform", base=10, transform=None,
+- name=None, dtype=float):
++ name=None, dtype=np.float, optimize=True):
+ if high <= low:
+ raise ValueError("the lower bound {} has to be less than the"
+ " upper bound {}".format(low, high))
+@@ -267,6 +267,7 @@ class Real(Dimension):
+ self._rvs = None
+ self.transformer = None
+ self.transform_ = transform
++ self.optimize = optimize
+ if isinstance(self.dtype, str) and self.dtype\
+ not in ['float', 'float16', 'float32', 'float64']:
+ raise ValueError("dtype must be 'float', 'float16', 'float32'"
+@@ -346,7 +347,8 @@ class Real(Dimension):
+ self.low, self.high).astype(self.dtype)
+ if self.dtype == float or self.dtype == 'float':
+ # necessary, otherwise the type is converted to a numpy type
+- return getattr(inv_transform, "tolist", lambda: value)()
++ inv_transform = getattr(inv_transform, "tolist", lambda: value)()
++ return [round(val, 6) for val in inv_transform]
+ else:
+ return inv_transform
+
+@@ -435,7 +437,7 @@ class Integer(Dimension):
+
+ """
+ def __init__(self, low, high, prior="uniform", base=10, transform=None,
+- name=None, dtype=np.int64):
++ name=None, dtype=np.int64, optimize=True):
+ if high <= low:
+ raise ValueError("the lower bound {} has to be less than the"
+ " upper bound {}".format(low, high))
+@@ -450,6 +452,7 @@ class Integer(Dimension):
+ self.name = name
+ self.dtype = dtype
+ self.transform_ = transform
++ self.optimize = optimize
+ self._rvs = None
+ self.transformer = None
+
diff --git a/var/spack/repos/builtin/packages/py-scinum/package.py b/var/spack/repos/builtin/packages/py-scinum/package.py
new file mode 100644
index 0000000000..1c364a6388
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-scinum/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyScinum(PythonPackage):
+ """Scientific numbers with multiple uncertainties and
+ correlation-aware, Gaussian propagation, and numpy support"""
+
+ homepage = "https://github.com/riga/scinum"
+ pypi = "scinum/scinum-1.2.0.tar.gz"
+
+ version('1.2.0', sha256='31802d9b580f3a89c0876f34432851bc4def9cb2844d6f3c8e044480f2dd2f91')
+
+ depends_on('python@2.7:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py
index 8184debba5..12a9f02eef 100644
--- a/var/spack/repos/builtin/packages/py-scipy/package.py
+++ b/var/spack/repos/builtin/packages/py-scipy/package.py
@@ -18,6 +18,8 @@ class PyScipy(PythonPackage):
maintainers = ['adamjstewart']
version('master', branch='master')
+ version('1.7.3', sha256='ab5875facfdef77e0a47d5fd39ea178b58e60e454a4c85aa1e52fcb80db7babf')
+ version('1.7.2', sha256='fa2dbabaaecdb502641b0b3c00dec05fb475ae48655c66da16c9ed24eda1e711')
version('1.7.1', sha256='6b47d5fa7ea651054362561a28b1ccc8da9368a39514c1bbf6c0977a1c376764')
version('1.7.0', sha256='998c5e6ea649489302de2c0bc026ed34284f531df89d2bdc8df3a0d44d165739')
version('1.6.3', sha256='a75b014d3294fce26852a9d04ea27b5671d86736beb34acdfc05859246260707')
@@ -51,14 +53,16 @@ class PyScipy(PythonPackage):
depends_on('python@3.5:', when='@1.3:1.4', type=('build', 'link', 'run'))
depends_on('python@3.6:', when='@1.5.0:1.5', type=('build', 'link', 'run'))
depends_on('python@3.7:', when='@1.6:1.6.1', type=('build', 'link', 'run'))
- depends_on('python@3.7:3.9', when='@1.6.2:', type=('build', 'link', 'run'))
+ depends_on('python@3.7:3.9', when='@1.6.2:1.7.1', type=('build', 'link', 'run'))
+ depends_on('python@3.7:3.10', when='@1.7.2:', type=('build', 'link', 'run'))
depends_on('py-setuptools', when='@:1.5', type='build')
depends_on('py-setuptools@:51.0.0', when='@1.6', type='build')
depends_on('py-setuptools@:57', when='@1.7:', type='build')
depends_on('py-pybind11@2.2.4:', when='@1.4.0', type=('build', 'link'))
depends_on('py-pybind11@2.4.0:', when='@1.4.1:1.4', type=('build', 'link'))
depends_on('py-pybind11@2.4.3:', when='@1.5:1.6.1', type=('build', 'link'))
- depends_on('py-pybind11@2.4.3:2.6', when='@1.6.2:', type=('build', 'link'))
+ depends_on('py-pybind11@2.4.3:2.6', when='@1.6.2:1.7.1', type=('build', 'link'))
+ depends_on('py-pybind11@2.4.3:2.7', when='@1.7.2:', type=('build', 'link'))
depends_on('py-numpy@1.5.1:+blas+lapack', when='@:0.15', type=('build', 'link', 'run'))
depends_on('py-numpy@1.6.2:+blas+lapack', when='@0.16:0.17', type=('build', 'link', 'run'))
depends_on('py-numpy@1.7.1:+blas+lapack', when='@0.18.0:0.18', type=('build', 'link', 'run'))
@@ -67,8 +71,9 @@ class PyScipy(PythonPackage):
depends_on('py-numpy@1.14.5:+blas+lapack', when='@1.5.0:1.5', type=('build', 'link', 'run'))
depends_on('py-numpy@1.16.5:+blas+lapack', when='@1.6:1.6.1', type=('build', 'link', 'run'))
depends_on('py-numpy@1.16.5:1.22+blas+lapack', when='@1.6.2:', type=('build', 'link', 'run'))
- depends_on('py-cython@0.29.18:2.9', when='@1.7:', type='build')
- depends_on('py-pythran@0.9.11:', when='@1.7:', type=('build', 'link'))
+ depends_on('py-cython@0.29.18:2', when='@1.7:', type='build')
+ depends_on('py-pythran@0.9.11', when='@1.7.0:1.7.1', type=('build', 'link'))
+ depends_on('py-pythran@0.9.12:0.9', when='@1.7.2:', type=('build', 'link'))
depends_on('py-pytest', type='test')
# NOTE: scipy picks up Blas/Lapack from numpy, see
diff --git a/var/spack/repos/builtin/packages/py-selectors34/package.py b/var/spack/repos/builtin/packages/py-selectors34/package.py
new file mode 100644
index 0000000000..55f86bc604
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-selectors34/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+# Package automatically generated using 'pip2spack' converter
+
+
+class PySelectors34(PythonPackage):
+ """
+ Backport of the selectors module from Python 3.4.
+ """
+
+ homepage = "https://github.com/berkerpeksag/selectors34"
+ pypi = 'selectors34/selectors34-1.2.tar.gz'
+ maintainers = ['liuyangzhuan']
+
+ version('1.2', sha256='09f5066337f8a76fb5233f267873f89a27a17c10bf79575954894bb71686451c')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-six', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-send2trash/package.py b/var/spack/repos/builtin/packages/py-send2trash/package.py
index ef8b88a9ed..9640592d76 100644
--- a/var/spack/repos/builtin/packages/py-send2trash/package.py
+++ b/var/spack/repos/builtin/packages/py-send2trash/package.py
@@ -12,6 +12,7 @@ class PySend2trash(PythonPackage):
homepage = "https://github.com/hsoft/send2trash"
url = "https://github.com/hsoft/send2trash/archive/1.5.0.tar.gz"
+ version('1.8.0', sha256='937b038abd9f1e7b8c5d7a116be5dc4663beb71df74dcccffe56cacf992c7a9c')
version('1.5.0', sha256='7cebc0ffc8b6d6e553bce9c6bb915614610ba2dec17c2f0643b1b97251da2a41')
depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-serpent/package.py b/var/spack/repos/builtin/packages/py-serpent/package.py
new file mode 100644
index 0000000000..b2113cfbf4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-serpent/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+# Package automatically generated using 'pip2spack' converter
+
+
+class PySerpent(PythonPackage):
+ """
+ Serialization based on ast.literal_eval
+ """
+
+ homepage = "https://github.com/irmen/Serpent"
+ pypi = 'serpent/serpent-1.40.tar.gz'
+ maintainers = ['liuyangzhuan']
+
+ version('1.40', sha256='10b34e7f8e3207ee6fb70dcdc9bce473851ee3daf0b47c58aec1b48032ac11ce')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('python@3.2:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-setupmeta/package.py b/var/spack/repos/builtin/packages/py-setupmeta/package.py
new file mode 100644
index 0000000000..0b7c9baf39
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-setupmeta/package.py
@@ -0,0 +1,18 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PySetupmeta(PythonPackage):
+ """Simplify your setup.py."""
+
+ homepage = "https://github.com/codrsquad/setupmeta"
+ pypi = "setupmeta/setupmeta-3.3.0.tar.gz"
+
+ version('3.3.0', sha256='32914af4eeffb8bf1bd45057254d9dff4d16cb7ae857141e07698f7ac19dc960')
+
+ depends_on('python@2.7:', type=('build', 'run'))
+ depends_on('py-setuptools', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-setuptools-cpp/package.py b/var/spack/repos/builtin/packages/py-setuptools-cpp/package.py
new file mode 100644
index 0000000000..5f4af3deb2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-setuptools-cpp/package.py
@@ -0,0 +1,46 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PySetuptoolsCpp(PythonPackage):
+ """Simplified packaging for pybind11-based C++ extensions"""
+
+ homepage = "https://github.com/dmontagu/setuptools-cpp"
+ pypi = "setuptools_cpp/setuptools_cpp-0.1.0.tar.gz"
+
+ maintainers = ['dorton']
+
+ version('0.1.0', sha256='4fd5e08603237578d06d28efd592d9847b523ede3e502f660be44b1e6254674d')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-appdirs', type=('build', 'run'))
+ depends_on('py-atomicwrites', type=('build', 'run'))
+ depends_on('py-attrs', type=('build', 'run'))
+ depends_on('py-black', type=('build', 'run'))
+ depends_on('py-chardet', type=('build', 'run'))
+ depends_on('py-colorama', type=('build', 'run'))
+ depends_on('py-chardet', type=('build', 'run'))
+ depends_on('py-certifi', type=('build', 'run'))
+ depends_on('py-click', type=('build', 'run'))
+ depends_on('py-codecov', type=('build', 'run'))
+ depends_on('py-coverage', type=('build', 'run'))
+ depends_on('py-entrypoints', type=('build', 'run'))
+ depends_on('py-flake8', type=('build', 'run'))
+ depends_on('py-importlib-metadata', type=('build', 'run'))
+ depends_on('py-isort', type=('build', 'run'))
+ depends_on('py-jinja2', type=('build', 'run'))
+ depends_on('py-markdown', type=('build', 'run'))
+ depends_on('py-markupsafe', type=('build', 'run'))
+ depends_on('py-mccabe', type=('build', 'run'))
+ depends_on('py-more-itertools', type=('build', 'run'))
+ depends_on('py-mypy', type=('build', 'run'))
+ depends_on('py-mypy-extensions', type=('build', 'run'))
+ depends_on('py-packaging', type=('build', 'run'))
+ depends_on('py-pathspec', type=('build', 'run'))
+ depends_on('py-pluggy', type=('build', 'run'))
+ depends_on('py-tornado', type=('build', 'run'))
+ depends_on('py-typed-ast', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-setuptools/package.py b/var/spack/repos/builtin/packages/py-setuptools/package.py
index b8f1710ea9..71804b5a69 100644
--- a/var/spack/repos/builtin/packages/py-setuptools/package.py
+++ b/var/spack/repos/builtin/packages/py-setuptools/package.py
@@ -13,6 +13,7 @@ class PySetuptools(PythonPackage):
homepage = "https://github.com/pypa/setuptools"
pypi = "setuptools/setuptools-57.4.0.tar.gz"
+ version('59.4.0', sha256='b4c634615a0cf5b02cf83c7bedffc8da0ca439f00e79452699454da6fbd4153d')
version('58.2.0', sha256='2c55bdb85d5bb460bd2e3b12052b677879cffcf46c0c688f2e5bf51d36001145')
version('57.4.0', sha256='6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465')
version('57.1.0', sha256='cfca9c97e7eebbc8abe18d5e5e962a08dcad55bb63afddd82d681de4d22a597b')
diff --git a/var/spack/repos/builtin/packages/py-shellingham/package.py b/var/spack/repos/builtin/packages/py-shellingham/package.py
new file mode 100644
index 0000000000..a08ac33ce6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-shellingham/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyShellingham(PythonPackage):
+ """Tool to Detect Surrounding Shell"""
+
+ homepage = "https://github.com/sarugaku/shellingham"
+ pypi = "shellingham/shellingham-1.4.0.tar.gz"
+
+ version('1.4.0', sha256='4855c2458d6904829bd34c299f11fdeed7cfefbf8a2c522e4caea6cd76b3171e')
+
+ depends_on('python@2.6:2.7,3.4:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-wheel', type='build')
diff --git a/var/spack/repos/builtin/packages/py-sina/no_orjson.patch b/var/spack/repos/builtin/packages/py-sina/no_orjson.patch
new file mode 100644
index 0000000000..de937f4157
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-sina/no_orjson.patch
@@ -0,0 +1,15 @@
+--- a/python/setup.py
++++ b/python/setup.py
+@@ -51,9 +51,7 @@ setup(name='llnl-sina',
+ 'six',
+ 'sqlalchemy',
+ 'enum34;python_version<"3.4"',
+- 'orjson;python_version>="3.6" and platform_machine!="ppc64le"',
+- 'ujson;python_version>="3.6" and platform_machine=="ppc64le"',
+- 'ujson<4;python_version<"3.6" and platform_machine!="ppc64le"',
++ 'ujson',
+ ],
+ license='MIT',
+ classifiers=[
+
+
diff --git a/var/spack/repos/builtin/packages/py-sina/package.py b/var/spack/repos/builtin/packages/py-sina/package.py
new file mode 100644
index 0000000000..deb46cd2ff
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-sina/package.py
@@ -0,0 +1,44 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack import *
+
+
+class PySina(PythonPackage):
+ """Sina allows codes to store, query, and visualize their data through an
+ easy-to-use Python API. Data that fits its recognized schema can be ingested
+ into one or more supported backends.
+ Sina's API is independent of backend and gives users the benefits of a database
+ without requiring knowledge of one, allowing queries to be expressed in pure
+ Python. Visualizations are also provided through Python.
+
+ Sina is intended especially for use with run metadata,
+ allowing users to easily and efficiently find simulation runs that match some
+ criteria.
+ """
+
+ homepage = "https://github.com/LLNL/Sina"
+ git = "https://github.com/LLNL/Sina.git"
+
+ # notify when the package is updated.
+ maintainers = [
+ 'HaluskaR',
+ 'estebanpauli',
+ 'murray55',
+ 'doutriaux1',
+ ]
+ version('1.11.0', tag="v1.11.0")
+ version('1.10.0', tag="v1.10.0")
+
+ # let's remove dependency on orjson
+ patch('no_orjson.patch')
+ depends_on('py-setuptools', type='build')
+ depends_on('py-enum34', when='^python@:3.3', type=('build', 'run'))
+ depends_on('py-ujson', type=('build', 'run'))
+ depends_on("py-sqlalchemy", type=("build", "run"))
+ depends_on("py-six", type=("build", "run"))
+
+ build_directory = 'python'
diff --git a/var/spack/repos/builtin/packages/py-singledispatch/package.py b/var/spack/repos/builtin/packages/py-singledispatch/package.py
index aba411640f..29582d6308 100644
--- a/var/spack/repos/builtin/packages/py-singledispatch/package.py
+++ b/var/spack/repos/builtin/packages/py-singledispatch/package.py
@@ -11,8 +11,12 @@ class PySingledispatch(PythonPackage):
pypi = "singledispatch/singledispatch-3.4.0.3.tar.gz"
+ version('3.7.0', sha256='c1a4d5c1da310c3fd8fccfb8d4e1cb7df076148fd5d858a819e37fffe44f3092')
version('3.4.0.3', sha256='5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c')
+ depends_on('python@2.6:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools@42:', type='build', when='@3.7:')
+ depends_on('py-setuptools-scm@3.4.1: +toml', type='build', when='@3.7:')
depends_on('py-six', type=('build', 'run'))
depends_on('py-ordereddict', when="^python@:2.6", type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-sip/package.py b/var/spack/repos/builtin/packages/py-sip/package.py
index 2a6d79fe48..768f2fb989 100644
--- a/var/spack/repos/builtin/packages/py-sip/package.py
+++ b/var/spack/repos/builtin/packages/py-sip/package.py
@@ -8,16 +8,14 @@ import os
from spack import *
-class PySip(Package):
- """SIP is a tool that makes it very easy to create Python bindings for C
- and C++ libraries."""
+class PySip(PythonPackage):
+ """A Python bindings generator for C/C++ libraries."""
- homepage = "https://www.riverbankcomputing.com/software/sip/intro"
- url = "https://www.riverbankcomputing.com/hg/sip/archive/4.19.21.tar.gz"
- list_url = "https://www.riverbankcomputing.com/hg/sip/archive"
- hg = "https://www.riverbankcomputing.com/hg/sip"
+ homepage = "https://www.riverbankcomputing.com/software/sip"
+ pypi = "sip/sip-6.4.0.tar.gz"
- version('develop', hg=hg) # wasn't actually able to clone this
+ version('6.4.0', sha256='42ec368520b8da4a0987218510b1b520b4981e4405086c1be384733affc2bcb0')
+ version('5.5.0', sha256='5d024c419b30fea8a6de8c71a560c7ab0bc3c221fbfb14d55a5b865bd58eaac5')
version('4.19.21', sha256='3bfd58e875a87471c00e008f25a01d8312885aa01efc4f688e5cac861c8676e4')
version('4.19.20', sha256='475f85277a6601c406ade508b6c935b9f2a170c16fd3ae9dd4cdee7a4f7f340d')
version('4.19.19', sha256='348cd6229b095a3090e851555814f5147bffcb601cec891f1038eb6b38c9d856')
@@ -25,47 +23,59 @@ class PySip(Package):
version('4.19.15', sha256='02bff1ac89253e12cdf1406ad39f841d0e264b0d96a7de13dfe9e29740df2053')
version('4.19.13', sha256='92193fcf990503bf29f03e290efc4ee1812d556efc18acf5c8b88c090177a630')
- variant('module', default='sip', description='Name of private SIP module',
+ variant('module', default='sip', when='@:4', description='Name of private SIP module',
values=str, multi=False)
- extends('python')
+ depends_on('python@3.6:', when='@6:', type=('build', 'run'))
+ depends_on('python@3.5.1:', when='@5:', type=('build', 'run'))
+ depends_on('py-packaging', when='@5:', type='build')
+ depends_on('py-setuptools@30.3:', when='@5:', type='build')
+ depends_on('py-toml', when='@5:', type='build')
+ depends_on('flex', when='@:4', type='build')
+ depends_on('bison', when='@:4', type='build')
- depends_on('flex', type='build')
- depends_on('bison', type='build')
-
- # https://www.riverbankcomputing.com/static/Docs/sip/installation.html
+ # needed for @:4
phases = ['configure', 'build', 'install']
+ def url_for_version(self, version):
+ if version < Version('5.0.0'):
+ return "https://www.riverbankcomputing.com/hg/sip/archive/{0}.tar.gz".format(version.dotted)
+ return super(PySip, self).url_for_version(version)
+
@run_before('configure')
def prepare(self):
- if not os.path.exists('configure.py'):
+ if self.spec.satisfies('@:4') and not os.path.exists('configure.py'):
python('build.py', 'prepare')
def configure(self, spec, prefix):
- args = [
- '--sip-module={0}'.format(spec.variants['module'].value),
- '--bindir={0}'.format(prefix.bin),
- '--destdir={0}'.format(site_packages_dir),
- '--incdir={0}'.format(python_include_dir),
- '--sipdir={0}'.format(prefix.share.sip),
- '--stubsdir={0}'.format(site_packages_dir),
- ]
-
- python('configure.py', *args)
-
+ if self.spec.satisfies('@:4'):
+ args = [
+ '--sip-module={0}'.format(spec.variants['module'].value),
+ '--bindir={0}'.format(prefix.bin),
+ '--destdir={0}'.format(site_packages_dir),
+ '--incdir={0}'.format(python_include_dir),
+ '--sipdir={0}'.format(prefix.share.sip),
+ '--stubsdir={0}'.format(site_packages_dir),
+ ]
+
+ python('configure.py', *args)
+
+ @when('@:4')
def build(self, spec, prefix):
make()
+ @when('@:4')
def install(self, spec, prefix):
make('install')
@run_after('install')
def extend_path_setup(self):
- # See github issue #14121 and PR #15297
- module = self.spec.variants['module'].value
- if module != 'sip':
- module = module.split('.')[0]
- with working_dir(site_packages_dir):
- with open(os.path.join(module, '__init__.py'), 'w') as f:
- f.write('from pkgutil import extend_path\n')
- f.write('__path__ = extend_path(__path__, __name__)\n')
+ if self.spec.satisfies('@:4'):
+ # See github issue #14121 and PR #15297
+ module = self.spec.variants['module'].value
+ if module != 'sip':
+ module = module.split('.')[0]
+ with working_dir(site_packages_dir):
+ with open(os.path.join(module, '__init__.py'), 'w') as f:
+ f.write('from pkgutil import extend_path\n')
+ f.write('__path__ = extend_path(__path__, __name__)\n')
diff --git a/var/spack/repos/builtin/packages/py-skl2onnx/package.py b/var/spack/repos/builtin/packages/py-skl2onnx/package.py
new file mode 100644
index 0000000000..591fa9c5ea
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-skl2onnx/package.py
@@ -0,0 +1,23 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PySkl2onnx(PythonPackage):
+ """Convert scikit-learn models to ONNX"""
+
+ homepage = "https://github.com/onnx/sklearn-onnx"
+ pypi = "skl2onnx/skl2onnx-1.10.3.tar.gz"
+
+ version('1.10.3', sha256='798933378145412b9876ab3ff2c1dd5f241a7296406d786262000afa8d329628')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy@1.15:', type=('build', 'run'))
+ depends_on('py-scipy@1.0:', type=('build', 'run'))
+ depends_on('py-protobuf', type=('build', 'run'))
+ depends_on('py-onnx@1.2.1:', type=('build', 'run'))
+ depends_on('py-scikit-learn@0.19:', type=('build', 'run'))
+ depends_on('py-onnxconverter-common@1.7.0:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-slepc4py/package.py b/var/spack/repos/builtin/packages/py-slepc4py/package.py
index b86fe451ed..afbbd7fa77 100644
--- a/var/spack/repos/builtin/packages/py-slepc4py/package.py
+++ b/var/spack/repos/builtin/packages/py-slepc4py/package.py
@@ -11,12 +11,13 @@ class PySlepc4py(PythonPackage):
"""
homepage = "https://gitlab.com/slepc/slepc4py"
- url = "https://slepc.upv.es/download/distrib/slepc4py-3.16.0.tar.gz"
+ url = "https://slepc.upv.es/download/distrib/slepc4py-3.16.1.tar.gz"
git = "https://gitlab.com/slepc/slepc.git"
maintainers = ['joseeroman', 'balay']
version('main', branch='main')
+ version('3.16.1', sha256='3ce93de975fa3966794efb09c315b6aff17e412197f99edb66bbfa71fc49093b')
version('3.16.0', sha256='e18850ebccb1e7c59accfbdbe4d004402abbde7f4e1291b0d2c5b560b308fb88')
version('3.15.2', sha256='c87135989c4d95b9c92a5b615a95eddc34b69dad9cc28b27d3cb7dfaec46177b')
version('3.15.1', sha256='bcdab6d2101ae00e189f4b33072805358cee2dda806a6b6a8e3c2f1b9f619dfd')
diff --git a/var/spack/repos/builtin/packages/py-sphinx-argparse/package.py b/var/spack/repos/builtin/packages/py-sphinx-argparse/package.py
new file mode 100644
index 0000000000..a28978a201
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-sphinx-argparse/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PySphinxArgparse(PythonPackage):
+ """Sphinx extension to automatically document argparse-based commands."""
+
+ homepage = "https://pypi.org/project/sphinx-argparse"
+ pypi = "sphinx-argparse/sphinx-argparse-0.3.1.tar.gz"
+
+ maintainers = ['sethrj']
+
+ version('0.3.1', sha256='82151cbd43ccec94a1530155f4ad34f251aaca6a0ffd5516d7fadf952d32dc1e')
+
+ depends_on('python@2.7.0:2.7,3.5:', type=('build', 'run'))
+ depends_on('py-sphinx@1.2.0:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-sphinx-multiversion/package.py b/var/spack/repos/builtin/packages/py-sphinx-multiversion/package.py
new file mode 100644
index 0000000000..bbeb14e967
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-sphinx-multiversion/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PySphinxMultiversion(PythonPackage):
+ """A Sphinx extension for building self-hosted versioned documentation."""
+
+ homepage = "https://github.com/Holzhaus/sphinx-multiversion"
+ pypi = "sphinx-multiversion/sphinx-multiversion-0.2.4.tar.gz"
+
+ version('0.2.4', sha256='5cd1ca9ecb5eed63cb8d6ce5e9c438ca13af4fa98e7eb6f376be541dd4990bcb')
+ version('0.2.3', sha256='e46565ac2f703f3b55652f33c159c8059865f5d13dae7f0e8403e5afc2996f5f')
+ version('0.2.2', sha256='c0a4f2cbb13eb62b5cd79e2f6901e5d90ea191d3f37e96e1f15b976827de0ac0')
+ version('0.2.1', sha256='0775847454965005a3a8433c1bf38379f723c026de9c4a7ddd447b0349df90c1')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-sphinx', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-sphinx/package.py b/var/spack/repos/builtin/packages/py-sphinx/package.py
index bfd0456165..e7aaff2636 100644
--- a/var/spack/repos/builtin/packages/py-sphinx/package.py
+++ b/var/spack/repos/builtin/packages/py-sphinx/package.py
@@ -12,6 +12,7 @@ class PySphinx(PythonPackage):
homepage = "https://www.sphinx-doc.org/en/master/"
pypi = "Sphinx/Sphinx-3.2.0.tar.gz"
+ version('4.3.1', sha256='32a5b3e9a1b176cc25ed048557d4d3d01af635e6b76c5bc7a43b0a34447fbd45')
version('4.1.2', sha256='3092d929cd807926d846018f2ace47ba2f3b671b309c7a89cd3306e80c826b13')
version('4.1.1', sha256='23c846a1841af998cb736218539bb86d16f5eb95f5760b1966abcd2d584e62b8')
version('4.0.2', sha256='b5c2ae4120bf00c799ba9b3699bc895816d272d120080fbc967292f29b52b48c')
@@ -57,8 +58,8 @@ class PySphinx(PythonPackage):
depends_on('py-docutils@0.14:0.17', when='@4:', type=('build', 'run'))
depends_on('py-docutils@0.12:', when='@:3', type=('build', 'run'))
depends_on('py-snowballstemmer@1.1:', type=('build', 'run'))
- depends_on('py-babel@1.3:1,2.1:', type=('build', 'run'))
- depends_on('py-alabaster@0.7.0:0.7', type=('build', 'run'))
+ depends_on('py-babel@1.3:', type=('build', 'run'))
+ depends_on('py-alabaster@0.7', type=('build', 'run'))
depends_on('py-imagesize', when='@1.4:', type=('build', 'run'))
depends_on('py-requests@2.5.0:', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
@@ -66,3 +67,4 @@ class PySphinx(PythonPackage):
depends_on('py-packaging', when='@1.7.4:', type=('build', 'run'))
depends_on('py-typing', when='@1.6.1', type=('build', 'run'))
depends_on('py-typing', when='@1.6.2:^python@2.7:3.4', type=('build', 'run'))
+ depends_on('py-colorama@0.3.5:', when='platform=windows', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-stevedore/package.py b/var/spack/repos/builtin/packages/py-stevedore/package.py
index 7900375d4e..feb368d633 100644
--- a/var/spack/repos/builtin/packages/py-stevedore/package.py
+++ b/var/spack/repos/builtin/packages/py-stevedore/package.py
@@ -12,9 +12,14 @@ class PyStevedore(PythonPackage):
homepage = "https://docs.openstack.org/stevedore/latest/"
pypi = "stevedore/stevedore-1.28.0.tar.gz"
+ version('3.5.0', sha256='f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335')
version('1.28.0', sha256='f1c7518e7b160336040fee272174f1f7b29a46febb3632502a8f2055f973d60b')
- depends_on('python@2.6:')
+ depends_on('python@2.6:', type=('build', 'run'))
+ depends_on('python@3.6:', type=('build', 'run'), when='@3.5.0:')
- depends_on('py-six@1.10.0:', type=('build', 'run'))
- depends_on('py-pbr@2.0.0:2.1.0', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+
+ depends_on('py-six@1.10.0:', type=('build', 'run'), when='@:3.4')
+ depends_on('py-pbr@2.0.0:2.1.0', type=('build', 'run'), when='@:3.4')
+ depends_on('py-pbr@2.0.0:', type=('build', 'run'), when='@3.5.0:')
diff --git a/var/spack/repos/builtin/packages/py-symengine/package.py b/var/spack/repos/builtin/packages/py-symengine/package.py
index 7b5b4d6eda..0411c455bf 100644
--- a/var/spack/repos/builtin/packages/py-symengine/package.py
+++ b/var/spack/repos/builtin/packages/py-symengine/package.py
@@ -12,16 +12,24 @@ class PySymengine(PythonPackage):
homepage = "https://github.com/symengine/symengine.py"
pypi = "symengine/symengine-0.2.0.tar.gz"
git = "https://github.com/symengine/symengine.py.git"
+ phases = ['install']
- version('develop', branch='master')
+ version('master', branch='master')
+ # pypi source doesn't have necessary files in cmake directory
+ version('0.8.1',
+ url='https://github.com/symengine/symengine.py/archive/refs/tags/v0.8.1.tar.gz',
+ sha256='02fe79e6d5e9b39a1d4e6fee05a2c1d1b10fd032157c7738ed97e32406ffb087')
version('0.2.0', sha256='78a14aea7aad5e7cbfb5cabe141581f9bba30e3c319690e5db8ad99fdf2d8885')
# Build dependencies
- depends_on('python@2.7:2.8,3.3:', type=('build', 'run'))
+ depends_on('python@2.7:2.8,3.3:', type=('build', 'run'), when='@0.2.0')
+ depends_on('python@3.6:3', type=('build', 'run'), when='@0.8.1:')
depends_on('py-setuptools', type='build')
- depends_on('py-cython@0.19.1:', type='build')
- depends_on('cmake@2.8.7:', type='build')
- depends_on('symengine@0.2.0:')
+ depends_on('py-cython@0.19.1:', type='build', when='@0.2.0')
+ depends_on('py-cython@0.29.24:', type='build', when='@0.8.1:')
+ depends_on('cmake@2.8.12:', type='build')
+ depends_on('symengine@0.2.0', when='@0.2.0')
+ depends_on('symengine@0.8.1', when='@0.8.1')
- def build_args(self, spec, prefix):
+ def install_args(self, spec, prefix):
return ['--symengine-dir={0}'.format(spec['symengine'].prefix)]
diff --git a/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py b/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py
new file mode 100644
index 0000000000..5a76e3c433
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py
@@ -0,0 +1,37 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyTensorflowDatasets(PythonPackage):
+ """tensorflow/datasets is a library of datasets ready to use with
+ TensorFlow."""
+
+ homepage = "https://github.com/tensorflow/datasets"
+ pypi = "tensorflow-datasets/tensorflow-datasets-4.4.0.tar.gz"
+
+ version(
+ "4.4.0",
+ sha256="3e95a61dec1fdb7b05dabc0dbed1b531e13d6c6fd362411423d0a775e5e9b960",
+ )
+
+ depends_on("python@3.6:", type=("build", "run"))
+ depends_on("py-setuptools", type="build")
+ depends_on("py-absl-py", type=("build", "run"))
+ depends_on("py-attrs@18.1.0:", type=("build", "run"))
+ depends_on("py-dill", type=("build", "run"))
+ depends_on("py-future", type=("build", "run"))
+ depends_on("py-numpy", type=("build", "run"))
+ depends_on("py-promise", type=("build", "run"))
+ depends_on("py-protobuf@3.12.2:", type=("build", "run"))
+ depends_on("py-requests@2.19.0:", type=("build", "run"))
+ depends_on("py-six", type=("build", "run"))
+ depends_on("py-tensorflow-metadata", type=("build", "run"))
+ depends_on("py-termcolor", type=("build", "run"))
+ depends_on("py-tqdm", type=("build", "run"))
+ depends_on("py-dataclasses", type=("build", "run"), when="python@:3.6")
+ depends_on("py-typing-extensions", type=("build", "run"), when="python@:3.7")
+ depends_on("py-importlib-resources", type=("build", "run"), when="python@:3.8")
diff --git a/var/spack/repos/builtin/packages/py-tensorflow-metadata/package.py b/var/spack/repos/builtin/packages/py-tensorflow-metadata/package.py
new file mode 100644
index 0000000000..e7d489dbdb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-tensorflow-metadata/package.py
@@ -0,0 +1,36 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import tempfile
+
+from spack import *
+
+
+class PyTensorflowMetadata(PythonPackage):
+ """Library and standards for schema and statistics.
+
+ TensorFlow Metadata provides standard representations for metadata that are
+ useful when training machine learning models with TensorFlow."""
+
+ homepage = "https://pypi.org/project/tensorflow-metadata/"
+
+ # Only available as a wheel on PyPI
+ url = "https://github.com/tensorflow/metadata/archive/refs/tags/v1.5.0.tar.gz"
+
+ version(
+ "1.5.0",
+ sha256="f0ec8aaf62fd772ef908efe4ee5ea3bc0d67dcbf10ae118415b7b206a1d61745",
+ )
+
+ depends_on("bazel@0.24.1:", type="build")
+ depends_on("python@3.7:3", type=("build", "run"))
+ depends_on("py-setuptools", type="build")
+ depends_on("py-absl-py@0.9:0.12", type=("build", "run"))
+ depends_on("py-googleapis-common-protos@1.52.0:1", type=("build", "run"))
+ depends_on("py-protobuf@3.13:3", type=("build", "run"))
+
+ def setup_build_environment(self, env):
+ tmp_path = tempfile.mkdtemp(prefix="spack")
+ env.set("TEST_TMPDIR", tmp_path)
diff --git a/var/spack/repos/builtin/packages/py-terminado/package.py b/var/spack/repos/builtin/packages/py-terminado/package.py
index 0e7c5c605b..75b60f68ac 100644
--- a/var/spack/repos/builtin/packages/py-terminado/package.py
+++ b/var/spack/repos/builtin/packages/py-terminado/package.py
@@ -9,6 +9,7 @@ class PyTerminado(PythonPackage):
pypi = "terminado/terminado-0.8.3.tar.gz"
+ version('0.12.1', sha256='b20fd93cc57c1678c799799d117874367cc07a3d2d55be95205b1a88fa08393f')
version('0.8.3', sha256='4804a774f802306a7d9af7322193c5390f1da0abb429e082a10ef1d46e6fb2c2')
version('0.8.2', sha256='de08e141f83c3a0798b050ecb097ab6259c3f0331b2f7b7750c9075ced2c20c2')
version('0.8.1', sha256='55abf9ade563b8f9be1f34e4233c7b7bde726059947a593322e8a553cc4c067a')
@@ -17,3 +18,5 @@ class PyTerminado(PythonPackage):
depends_on('py-tornado@4:', type=('build', 'run'))
depends_on('py-ptyprocess', type=('build', 'run'))
depends_on('python@2.7:2.8,3.4:', when='@0.8.2:', type=('build', 'run'))
+ depends_on('python@3.6:', when='@0.12.1:', type=('build', 'run'))
+ depends_on('py-setuptools@40.8.0:', when='@0.12.1:', type='build')
diff --git a/var/spack/repos/builtin/packages/py-tern/package.py b/var/spack/repos/builtin/packages/py-tern/package.py
new file mode 100644
index 0000000000..68758667a0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-tern/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyTern(PythonPackage):
+ """
+ Tern is a software package inspection tool that can create a Software Bill
+ of Materials (SBoM) for containers.
+ """
+
+ pypi = "tern/tern-2.8.0.tar.gz"
+ git = "https://github.com/tern-tools/tern.git"
+
+ version('main', branch='main')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-wheel', type='build')
+ depends_on('py-pip', type='build')
diff --git a/var/spack/repos/builtin/packages/py-testpath/package.py b/var/spack/repos/builtin/packages/py-testpath/package.py
index f5f8cd4a3a..9766e052ea 100644
--- a/var/spack/repos/builtin/packages/py-testpath/package.py
+++ b/var/spack/repos/builtin/packages/py-testpath/package.py
@@ -13,4 +13,7 @@ class PyTestpath(PythonPackage):
homepage = "https://github.com/jupyter/testpath"
pypi = "testpath/testpath-0.4.2.tar.gz"
+ version('0.5.0', sha256='1acf7a0bcd3004ae8357409fc33751e16d37ccc650921da1094a86581ad1e417')
version('0.4.2', sha256='b694b3d9288dbd81685c5d2e7140b81365d46c29f5db4bc659de5aa6b98780f8')
+
+ depends_on('python@3.5:', type=('build', 'run'), when='@0.5.0:')
diff --git a/var/spack/repos/builtin/packages/py-theano/package.py b/var/spack/repos/builtin/packages/py-theano/package.py
index 5d62a8f136..5fd3b96e04 100644
--- a/var/spack/repos/builtin/packages/py-theano/package.py
+++ b/var/spack/repos/builtin/packages/py-theano/package.py
@@ -15,12 +15,13 @@ class PyTheano(PythonPackage, CudaPackage):
git = "https://github.com/Theano/Theano.git"
version('master', branch='master')
+ version('1.0.5', sha256='6e9439dd53ba995fcae27bf20626074bfc2fff446899dc5c53cb28c1f9202e89')
version('1.0.4', sha256='35c9bbef56b61ffa299265a42a4e8f8cb5a07b2997dabaef0f8830b397086913')
version('1.0.2', sha256='6768e003d328a17011e6fca9126fbb8a6ffd3bb13cb21c450f3e724cca29abde')
version('1.0.1', sha256='88d8aba1fe2b6b75eacf455d01bc7e31e838c5a0fb8c13dde2d9472495ff4662')
version('0.8.2', sha256='7463c8f7ed1a787bf881f36d38a38607150186697e7ce7e78bfb94b7c6af8930')
- depends_on('python@2.6:2.8,3.3:')
+ depends_on('python@2.6:2.8,3.3:', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
depends_on('py-numpy@1.9.1:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-threadpoolctl/package.py b/var/spack/repos/builtin/packages/py-threadpoolctl/package.py
index 3f93c470a9..e4fa34ab2f 100644
--- a/var/spack/repos/builtin/packages/py-threadpoolctl/package.py
+++ b/var/spack/repos/builtin/packages/py-threadpoolctl/package.py
@@ -12,6 +12,8 @@ class PyThreadpoolctl(PythonPackage):
homepage = "https://github.com/joblib/threadpoolctl"
pypi = "threadpoolctl/threadpoolctl-2.0.0.tar.gz"
+ version('3.0.0', sha256='d03115321233d0be715f0d3a5ad1d6c065fe425ddc2d671ca8e45e9fd5d7a52a')
version('2.0.0', sha256='48b3e3e9ee079d6b5295c65cbe255b36a3026afc6dde3fb49c085cd0c004bbcf')
depends_on('python@3.5:', type=('build', 'run'))
+ depends_on('python@3.6:', type=('build', 'run'), when='@3.0.0:')
diff --git a/var/spack/repos/builtin/packages/py-tifffile/package.py b/var/spack/repos/builtin/packages/py-tifffile/package.py
index fdbfd27233..3ea916d85e 100644
--- a/var/spack/repos/builtin/packages/py-tifffile/package.py
+++ b/var/spack/repos/builtin/packages/py-tifffile/package.py
@@ -9,9 +9,10 @@ from spack import *
class PyTifffile(PythonPackage):
"""Read and write image data from and to TIFF files."""
- homepage = "https://github.com/blink1073/tifffile"
+ homepage = "https://github.com/cgohlke/tifffile"
pypi = "tifffile/tifffile-0.12.1.tar.gz"
+ version('2021.11.2', sha256='153e31fa1d892f482fabb2ae9f2561fa429ee42d01a6f67e58cee13637d9285b')
version('2020.10.1', sha256='799feeccc91965b69e1288c51a1d1118faec7f40b2eb89ad2979591b85324830')
version('0.12.1', sha256='802367effe86b0d1e64cb5c2ed886771f677fa63260b945e51a27acccdc08fa1')
diff --git a/var/spack/repos/builtin/packages/py-tomlkit/package.py b/var/spack/repos/builtin/packages/py-tomlkit/package.py
index 41ea206e34..202240510a 100644
--- a/var/spack/repos/builtin/packages/py-tomlkit/package.py
+++ b/var/spack/repos/builtin/packages/py-tomlkit/package.py
@@ -12,6 +12,7 @@ class PyTomlkit(PythonPackage):
homepage = "https://github.com/sdispater/tomlkit"
pypi = "tomlkit/tomlkit-0.7.0.tar.gz"
+ version('0.7.2', sha256='d7a454f319a7e9bd2e249f239168729327e4dd2d27b17dc68be264ad1ce36754')
version('0.7.0', sha256='ac57f29693fab3e309ea789252fcce3061e19110085aa31af5446ca749325618')
depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py
index 19805f48a0..72c786934e 100644
--- a/var/spack/repos/builtin/packages/py-torch/package.py
+++ b/var/spack/repos/builtin/packages/py-torch/package.py
@@ -23,6 +23,7 @@ class PyTorch(PythonPackage, CudaPackage):
import_modules = ['torch', 'torch.autograd', 'torch.nn', 'torch.utils']
version('master', branch='master', submodules=True)
+ version('1.10.1', tag='v1.10.1', submodules=True)
version('1.10.0', tag='v1.10.0', submodules=True)
version('1.9.1', tag='v1.9.1', submodules=True)
version('1.9.0', tag='v1.9.0', submodules=True)
@@ -219,6 +220,11 @@ class PyTorch(PythonPackage, CudaPackage):
# to detect openmp settings used by Fujitsu compiler.
patch('detect_omp_of_fujitsu_compiler.patch', when='%fj')
+ # Fix compilation of +distributed~tensorpipe
+ # https://github.com/pytorch/pytorch/issues/68002
+ patch('https://github.com/pytorch/pytorch/commit/c075f0f633fa0136e68f0a455b5b74d7b500865c.patch',
+ sha256='e69e41b5c171bfb00d1b5d4ee55dd5e4c8975483230274af4ab461acd37e40b8', when='@1.10.0+distributed~tensorpipe')
+
# Both build and install run cmake/make/make install
# Only run once to speed up build times
phases = ['install']
diff --git a/var/spack/repos/builtin/packages/py-torchgeo/package.py b/var/spack/repos/builtin/packages/py-torchgeo/package.py
index 931f8d474c..dd8086a118 100644
--- a/var/spack/repos/builtin/packages/py-torchgeo/package.py
+++ b/var/spack/repos/builtin/packages/py-torchgeo/package.py
@@ -14,31 +14,39 @@ class PyTorchgeo(PythonPackage):
"""
homepage = "https://github.com/microsoft/torchgeo"
+ pypi = "torchgeo/torchgeo-0.1.0.tar.gz"
git = "https://github.com/microsoft/torchgeo.git"
maintainers = ['adamjstewart', 'calebrob6']
version('main', branch='main')
+ version('0.1.1', sha256='6e28132f75e9d8cb3a3a0e8b443aba3cde26c8f3140b9426139ee6e8f8058b26')
+ version('0.1.0', sha256='44eb3cf10ab2ac63ff95e92fcd3807096bac3dcb9bdfe15a8edac9d440d2f323')
variant('datasets', default=False, description='Install optional dataset dependencies')
- variant('docs', default=False, description='Install documentation dependencies')
variant('style', default=False, description='Install style checking tools')
variant('tests', default=False, description='Install testing tools')
- variant('train', default=False, description='Install optional trainer dependencies')
# Required dependencies
depends_on('python@3.6:+bz2', type=('build', 'run'))
- depends_on('py-setuptools@30.4:', type='build')
+ depends_on('py-setuptools@42:', type='build')
depends_on('py-einops', type=('build', 'run'))
depends_on('py-fiona@1.5:', type=('build', 'run'))
+ depends_on('py-kornia@0.5.4:', type=('build', 'run'))
depends_on('py-matplotlib', type=('build', 'run'))
depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-omegaconf@2.1:', type=('build', 'run'))
depends_on('pil@2.9:', type=('build', 'run'))
depends_on('py-pyproj@2.2:', type=('build', 'run'))
+ depends_on('py-pytorch-lightning@1.3:', type=('build', 'run'))
depends_on('py-rasterio@1.0.16:', type=('build', 'run'))
depends_on('py-rtree@0.5:', type=('build', 'run'))
+ depends_on('py-scikit-learn@0.18:', type=('build', 'run'))
depends_on('py-shapely@1.3:', type=('build', 'run'))
+ depends_on('py-segmentation-models-pytorch@0.2:', type=('build', 'run'))
+ depends_on('py-timm@0.2.1:', type=('build', 'run'))
depends_on('py-torch@1.7:', type=('build', 'run'))
+ depends_on('py-torchmetrics', type=('build', 'run'))
depends_on('py-torchvision@0.3:', type=('build', 'run'))
# Optional dependencies
@@ -50,28 +58,14 @@ class PyTorchgeo(PythonPackage):
depends_on('py-rarfile@3:', type='run')
depends_on('py-scipy@0.9:', type='run')
- with when('+docs'):
- depends_on('py-ipywidgets@7:', type='run')
- depends_on('py-jupyterlab', type='run')
- depends_on('py-nbmake@0.1:', type='run')
- depends_on('py-nbsphinx@0.8.5:', type='run')
- depends_on('py-sphinx@3:', type='run')
- depends_on('py-pydocstyle@6.1:+toml', type='run')
- depends_on('py-pytorch-sphinx-theme', type='run')
-
with when('+style'):
depends_on('py-black@21:', type='run')
depends_on('py-flake8@3.8:', type='run')
depends_on('py-isort@5.8:+colors', type='run')
+ depends_on('py-pydocstyle@6.1:+toml', type='run')
with when('+tests'):
depends_on('py-mypy@0.900:', type='run')
+ depends_on('py-nbmake@0.1:', type='run')
depends_on('py-pytest@6:', type='run')
depends_on('py-pytest-cov@2.4:', type='run')
-
- with when('+train'):
- depends_on('py-omegaconf@2.1:', type='run')
- depends_on('py-pytorch-lightning@1.3:', type='run')
- depends_on('py-scikit-learn@0.18:', type='run')
- depends_on('py-segmentation-models-pytorch@0.2:', type='run')
- depends_on('py-torchmetrics', type='run')
diff --git a/var/spack/repos/builtin/packages/py-torchvision/package.py b/var/spack/repos/builtin/packages/py-torchvision/package.py
index 0a8a54fd26..442c40266d 100644
--- a/var/spack/repos/builtin/packages/py-torchvision/package.py
+++ b/var/spack/repos/builtin/packages/py-torchvision/package.py
@@ -16,6 +16,7 @@ class PyTorchvision(PythonPackage):
version('main', branch='main')
version('master', branch='main', deprecated=True)
+ version('0.11.2', sha256='55689c57c29f82438a133d0af3315991037be59c8e02471bdcaa31731154a714')
version('0.11.1', sha256='32a06ccf755e4d75006ce03701f207652747a63dbfdf65f0f20a1b6f93a2e834')
version('0.11.0', sha256='8e85acf8f5d39f27e92e610ccb506dac0bf4412bb366a318d2aa5f384cbd4d2c')
version('0.10.1', sha256='4d595cf0214c8adc817f8e3cd0043a027b52b481e05d67b04f4947fcb43d4277')
@@ -51,6 +52,7 @@ class PyTorchvision(PythonPackage):
# https://github.com/pytorch/vision#installation
depends_on('py-torch@master', when='@master', type=('build', 'link', 'run'))
+ depends_on('py-torch@1.10.1', when='@0.11.2', type=('build', 'link', 'run'))
depends_on('py-torch@1.10.0', when='@0.11.0:0.11.1', type=('build', 'link', 'run'))
depends_on('py-torch@1.9.1', when='@0.10.1', type=('build', 'link', 'run'))
depends_on('py-torch@1.9.0', when='@0.10.0', type=('build', 'link', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-traitlets/package.py b/var/spack/repos/builtin/packages/py-traitlets/package.py
index 3fc0241114..5ea86a8ab4 100644
--- a/var/spack/repos/builtin/packages/py-traitlets/package.py
+++ b/var/spack/repos/builtin/packages/py-traitlets/package.py
@@ -11,6 +11,7 @@ class PyTraitlets(PythonPackage):
pypi = "traitlets/traitlets-5.0.4.tar.gz"
+ version('5.1.1', sha256='059f456c5a7c1c82b98c2e8c799f39c9b8128f6d0d46941ee118daace9eb70c7')
version('5.0.4', sha256='86c9351f94f95de9db8a04ad8e892da299a088a64fd283f9f6f18770ae5eae1b')
version('4.3.3', sha256='d023ee369ddd2763310e4c3eae1ff649689440d4ae59d7485eb4cfbbe3e359f7')
version('4.3.2', sha256='9c4bd2d267b7153df9152698efb1050a5d84982d3384a37b2c1f7723ba3e7835')
@@ -24,7 +25,7 @@ class PyTraitlets(PythonPackage):
depends_on('python@3.7:', when='@5:', type=('build', 'run'))
depends_on('python@2.7:2.8,3.3:', type=('build', 'run'))
- depends_on('py-ipython-genutils', type=('build', 'run'))
+ depends_on('py-ipython-genutils', when='@:5.0', type=('build', 'run'))
depends_on('py-six', when='@:4', type=('build', 'run'))
depends_on('py-decorator', when='@:4', type=('build', 'run'))
depends_on('py-enum34', when='^python@:3.3', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-traits/package.py b/var/spack/repos/builtin/packages/py-traits/package.py
index 3fd2871b3e..54c248df07 100644
--- a/var/spack/repos/builtin/packages/py-traits/package.py
+++ b/var/spack/repos/builtin/packages/py-traits/package.py
@@ -10,6 +10,7 @@ class PyTraits(PythonPackage):
homepage = "https://docs.enthought.com/traits"
pypi = "traits/traits-6.0.0.tar.gz"
+ version('6.3.1', sha256='ebdd9b067a262045840a85e3ff34e1567ce4e9b6548c716cdcc82b5884ed9100')
version('6.2.0', sha256='16fa1518b0778fd53bf0547e6a562b1787bf68c8f6b7995a13bd1902529fdb0c')
version('6.0.0', sha256='dbcd70166feca434130a1193284d5819ca72ffbc8dbce8deeecc0cebb41a3bfb')
diff --git a/var/spack/repos/builtin/packages/py-trojanzoo-sphinx-theme/package.py b/var/spack/repos/builtin/packages/py-trojanzoo-sphinx-theme/package.py
new file mode 100644
index 0000000000..a351ce817c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-trojanzoo-sphinx-theme/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyTrojanzooSphinxTheme(PythonPackage):
+ """TrojanZoo Sphinx Theme"""
+
+ homepage = "https://github.com/ain-soph/trojanzoo_sphinx_theme"
+ pypi = "trojanzoo_sphinx_theme/trojanzoo_sphinx_theme-0.1.0.tar.gz"
+
+ version('0.1.0', sha256='7b80d70ec84279156dcb9668d3a8a135be1d0d54e20f554fc03ad22d9ff5e7b3')
+
+ depends_on('python@3:', type=('build', 'run'))
+ depends_on('py-setuptools@40.9:', type='build')
+ depends_on('py-sphinx@4.2:', type=('build', 'run'))
+ depends_on('py-docutils@0.17.1:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-tweedledum/package.py b/var/spack/repos/builtin/packages/py-tweedledum/package.py
new file mode 100644
index 0000000000..e6ed41ffbf
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-tweedledum/package.py
@@ -0,0 +1,27 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyTweedledum(PythonPackage):
+ """tweedledum is a library for synthesis, compilation, and
+ optimization of quantum circuits. The library is written to be
+ scalable up to problem sizes in which quantum circuits outperform
+ classical ones. Also, it is meant to be used both independently
+ and alongside established tools."""
+
+ homepage = "https://github.com/boschmitt/tweedledum"
+ pypi = 'tweedledum/tweedledum-1.1.1.tar.gz'
+
+ version('1.1.1', sha256='58d6f7a988b10c31be3faa1faf3e58288ef7e8159584bfa6ded45742f390309f')
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools@42:', type='build')
+ depends_on('py-scikit-build@0.12:', type='build')
+ depends_on('py-cmake@3.18:', type='build')
+ depends_on('py-ninja', type='build')
+ depends_on('py-wheel', type='build')
+ depends_on('eigen@3.3:')
+ depends_on('nlohmann-json@3.9.0:')
diff --git a/var/spack/repos/builtin/packages/py-twisted/package.py b/var/spack/repos/builtin/packages/py-twisted/package.py
index 82354c3456..18dbc1527b 100644
--- a/var/spack/repos/builtin/packages/py-twisted/package.py
+++ b/var/spack/repos/builtin/packages/py-twisted/package.py
@@ -9,11 +9,92 @@ from spack import *
class PyTwisted(PythonPackage):
"""An asynchronous networking framework written in Python"""
homepage = "https://twistedmatrix.com/"
- pypi = "Twisted/Twisted-15.3.0.tar.bz2"
+ pypi = "Twisted/Twisted-21.7.0.tar.gz"
+ version('21.7.0', sha256='2cd652542463277378b0d349f47c62f20d9306e57d1247baabd6d1d38a109006')
version('15.4.0', sha256='78862662fa9ae29654bc2b9d349c3f1d887e6b2ed978512c4442d53ea861f05c')
version('15.3.0', sha256='025729751cf898842262375a40f70ae1d246daea88369eab9f6bb96e528bf285')
+ depends_on('python@3.6.7:', type=('build', 'run'), when='@21.7.0:')
+
depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools@35.0.2:', type='build', when='@21.7.0:')
+
depends_on('py-zope-interface@3.6.0:', type=('build', 'run'), when='^python@:2')
depends_on('py-zope-interface@4.0.2:', type=('build', 'run'), when='^python@3:')
+ depends_on('py-zope-interface@4.4.2:', type=('build', 'run'), when='@21.7.0:')
+
+ depends_on('py-incremental@21.3.0:', type=('build', 'run'), when='@21.7.0:')
+ depends_on('py-constantly@15.1:', type=('build', 'run'), when='@21.7.0:')
+ depends_on('py-automat@0.8.0:', type=('build', 'run'), when='@21.7.0:')
+ depends_on('py-hyperlink@17.1.1:', type=('build', 'run'), when='@21.7.0:')
+ depends_on('py-attrs@19.2.0:', type=('build', 'run'), when='@21.7.0:')
+ depends_on('py-typing-extensions@3.6.5:', type=('build', 'run'), when='@21.7.0:')
+
+ def url_for_version(self, version):
+ url = "https://pypi.io/packages/source/T/Twisted/"
+
+ if version <= Version('20.3.0'):
+ url += 'Twisted-{0}.tar.bz2'
+ else:
+ url += 'Twisted-{0}.tar.gz'
+
+ url = url.format(version)
+ return url
+
+ @property
+ def import_modules(self):
+ modules = ['twisted',
+ 'twisted.positioning',
+ 'twisted.positioning.test',
+ 'twisted.protocols',
+ 'twisted.protocols.test',
+ 'twisted.protocols.haproxy',
+ 'twisted.protocols.haproxy.test',
+ 'twisted.web',
+ 'twisted.web._auth',
+ 'twisted.web.test',
+ 'twisted.scripts',
+ 'twisted.scripts.test',
+ 'twisted.runner',
+ 'twisted.runner.test',
+ 'twisted.cred',
+ 'twisted.cred.test',
+ 'twisted.plugins',
+ 'twisted.enterprise',
+ 'twisted.logger',
+ 'twisted.logger.test',
+ 'twisted.persisted',
+ 'twisted.persisted.test',
+ 'twisted.names',
+ 'twisted.names.test',
+ 'twisted.pair',
+ 'twisted.pair.test',
+ 'twisted.test',
+ 'twisted.tap',
+ 'twisted.python',
+ 'twisted.python.test',
+ 'twisted.trial',
+ 'twisted.trial._dist',
+ 'twisted.trial._dist.test',
+ 'twisted.trial.test',
+ 'twisted.words',
+ 'twisted.words.protocols',
+ 'twisted.words.protocols.jabber',
+ 'twisted.words.im',
+ 'twisted.words.test',
+ 'twisted.words.xish',
+ 'twisted.spread',
+ 'twisted.spread.test',
+ 'twisted.conch',
+ 'twisted.conch.scripts',
+ 'twisted.conch.ui',
+ 'twisted.conch.client',
+ 'twisted.conch.openssh_compat',
+ 'twisted.conch.test',
+ 'twisted.conch.insults',
+ 'twisted.conch.ssh',
+ 'twisted.internet',
+ 'twisted.internet.test']
+
+ return modules
diff --git a/var/spack/repos/builtin/packages/py-uhi/package.py b/var/spack/repos/builtin/packages/py-uhi/package.py
new file mode 100644
index 0000000000..f79e9ab1e4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-uhi/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyUhi(PythonPackage):
+ """Unified Histogram Interface:
+ tools to help library authors work with histograms"""
+
+ homepage = "https://github.com/Scikit-HEP/uhi"
+ pypi = "uhi/uhi-0.3.0.tar.gz"
+
+ version('0.3.0', sha256='3f441bfa89fae11aa762ae1ef1b1b454362d228e9084477773ffb82d6e9f5d2c')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-numpy@1.13.3:', type=('build', 'run'))
+ depends_on('py-typing-extensions@3.7:', type=('build', 'run'), when='^python@:3.7')
+ depends_on('py-setuptools', type='build')
+ # depends_on('py-poetry-core@1:', type='build') -- WIP
diff --git a/var/spack/repos/builtin/packages/py-uproot/package.py b/var/spack/repos/builtin/packages/py-uproot/package.py
index 06cccf211a..33bca1742e 100644
--- a/var/spack/repos/builtin/packages/py-uproot/package.py
+++ b/var/spack/repos/builtin/packages/py-uproot/package.py
@@ -23,6 +23,7 @@ class PyUproot(PythonPackage):
tags = ['hep']
+ version('4.1.8', sha256='09c46edc864520af50d018055e3d3577a4c6c37489484a664edfa4f1496b6755')
version('4.0.11', sha256='5c8f62c7eeaa50e1315e05469580130d0bcc50a6cb4456825777f73cfaf5938a')
version('4.0.10', sha256='b7f9786d87227fcdc6b84305a6219cd615844f934e3b7c2509e2d4ed095950ab')
version('4.0.9', sha256='345c20dd4e1921e3493200bf9ed4079909fb1277d02faf3136e19f4b90f8aa86')
@@ -42,6 +43,7 @@ class PyUproot(PythonPackage):
depends_on('python@2.6:2,3.5:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools@42:', type='build', when='@4.1.8:')
depends_on('py-numpy', type=('build', 'run'))
depends_on('xrootd', when="+xrootd")
diff --git a/var/spack/repos/builtin/packages/py-vector-quantize-pytorch/package.py b/var/spack/repos/builtin/packages/py-vector-quantize-pytorch/package.py
new file mode 100644
index 0000000000..747060c868
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-vector-quantize-pytorch/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyVectorQuantizePytorch(PythonPackage):
+ """A vector quantization library originally transcribed
+ from Deepmind's tensorflow implementation, made
+ conveniently into a package. It uses exponential moving
+ averages to update the dictionary."""
+
+ homepage = "https://github.com/lucidrains/vector-quantize-pytorch"
+ pypi = "vector_quantize_pytorch/vector_quantize_pytorch-0.3.9.tar.gz"
+
+ version('0.3.9', sha256='783ca76251299f0e3eb244062bc05c4416bb29157e57077e4a8969c5277f05ee')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-einops', type=('build', 'run'))
+ depends_on('py-torch', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-vector/package.py b/var/spack/repos/builtin/packages/py-vector/package.py
new file mode 100644
index 0000000000..910d100a09
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-vector/package.py
@@ -0,0 +1,24 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyVector(PythonPackage):
+ """Vector classes and utilities"""
+
+ homepage = "https://github.com/scikit-hep/vector"
+ pypi = "vector/vector-0.8.4.tar.gz"
+
+ version('0.8.4', sha256='ef97bfec0263766edbb74c290401f89921f8d11ae9e4a0ffd904ae40674f1239')
+
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-setuptools@42:', type='build')
+ depends_on('py-setuptools-scm@3.4: +toml', type='build')
+ depends_on('py-wheel', type='build')
+ depends_on('py-numpy@1.13.3:', type=('build', 'run'))
+ depends_on('py-packaging@19.0:', type=('build', 'run'))
+ depends_on('py-importlib-metadata@0.22:', type=('build', 'run'), when='^python@:3.7')
+ depends_on('py-typing-extensions', type=('build', 'run'), when='^python@:3.7')
diff --git a/var/spack/repos/builtin/packages/py-virtualenv-clone/package.py b/var/spack/repos/builtin/packages/py-virtualenv-clone/package.py
index 422c6489c8..ee87a76d6f 100644
--- a/var/spack/repos/builtin/packages/py-virtualenv-clone/package.py
+++ b/var/spack/repos/builtin/packages/py-virtualenv-clone/package.py
@@ -12,8 +12,10 @@ class PyVirtualenvClone(PythonPackage):
homepage = "https://github.com/edwardgeorge/virtualenv-clone"
pypi = "virtualenv-clone/virtualenv-clone-0.2.6.tar.gz"
+ version('0.5.7', sha256='418ee935c36152f8f153c79824bb93eaf6f0f7984bae31d3f48f350b9183501a')
version('0.2.6', sha256='6b3be5cab59e455f08c9eda573d23006b7d6fb41fae974ddaa2b275c93cc4405')
- depends_on('python@2.6:')
+ depends_on('python@2.6:', type=('build', 'run'))
+ depends_on('python@2.7:2,3.4:', type=('build', 'run'), when='@0.5.7:')
# not just build-time, requires pkg_resources
depends_on('py-setuptools', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-virtualenv/package.py b/var/spack/repos/builtin/packages/py-virtualenv/package.py
index 6098c804fe..82b500fb75 100644
--- a/var/spack/repos/builtin/packages/py-virtualenv/package.py
+++ b/var/spack/repos/builtin/packages/py-virtualenv/package.py
@@ -12,6 +12,7 @@ class PyVirtualenv(PythonPackage):
homepage = "https://virtualenv.pypa.io/"
pypi = "virtualenv/virtualenv-16.7.6.tar.gz"
+ version('20.10.0', sha256='576d05b46eace16a9c348085f7d0dc8ef28713a2cabaa1cf0aea41e8f12c9218')
version('16.7.6', sha256='5d370508bf32e522d79096e8cbea3499d47e624ac7e11e9089f9397a0b3318df')
version('16.4.1', sha256='5a3ecdfbde67a4a3b3111301c4d64a5b71cf862c8c42958d30cf3253df1f29dd')
version('16.0.0', sha256='ca07b4c0b54e14a91af9f34d0919790b016923d157afda5efdde55c96718f752')
@@ -21,6 +22,18 @@ class PyVirtualenv(PythonPackage):
version('1.11.6', sha256='3e7a4c151e2ee97f51db0215bfd2a073b04a91e9786df6cb67c916f16abe04f7')
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
+ depends_on('python@2.7:2.8,3.5:', type=('build', 'run'), when='@20.10.0:')
# not just build-time, requires pkg_resources
depends_on('py-setuptools@40.6.3:', type=('build', 'run'))
+ depends_on('py-setuptools@41.00.03:', type=('build', 'run'), when='@20.10.0:')
+ depends_on('py-setuptools-scm@2:', type=('build', 'run'), when='@20.10.0:')
+
+ depends_on('py-backports-entry-points-selectable @1.0.4:', type=('build', 'run'), when='@20.10.0:')
+ depends_on('py-distlib@0.3.1:0', type=('build', 'run'), when='@20.10.0:')
+ depends_on('py-filelock@3.2:3', type=('build', 'run'), when='@20.10.0:')
+ depends_on('py-platformdirs@2:2', type=('build', 'run'), when='@20.10.0:')
+ depends_on('py-six@1.9.0:1', type=('build', 'run'), when='@20.10.0:')
+ depends_on('py-importlib-metadata@0.12:', type=('build', 'run'), when='@20.10.0: ^python@:3.7')
+ depends_on('py-importlib-resources@1:', type=('build', 'run'), when='@20.10.0: ^python@:3.6')
+ depends_on('py-pathlib2', type=('build', 'run'), when='@20.10.0: ^python@:3.3')
diff --git a/var/spack/repos/builtin/packages/py-virtualenvwrapper/package.py b/var/spack/repos/builtin/packages/py-virtualenvwrapper/package.py
index 61aab2c210..8687fe4375 100644
--- a/var/spack/repos/builtin/packages/py-virtualenvwrapper/package.py
+++ b/var/spack/repos/builtin/packages/py-virtualenvwrapper/package.py
@@ -16,9 +16,11 @@ class PyVirtualenvwrapper(PythonPackage):
homepage = "https://bitbucket.org/virtualenvwrapper/virtualenvwrapper.git"
pypi = "virtualenvwrapper/virtualenvwrapper-4.8.2.tar.gz"
+ version('4.8.4', sha256='51a1a934e7ed0ff221bdd91bf9d3b604d875afbb3aa2367133503fee168f5bfa')
version('4.8.2', sha256='18d8e4c500c4c4ee794f704e050cf2bbb492537532a4521d1047e7dd1ee4e374')
depends_on('python@2.6:')
+ depends_on('py-pbr', type='build', when='@4.8.4:')
depends_on('py-virtualenv', type=('build', 'run'))
depends_on('py-virtualenv-clone', type=('build', 'run'))
depends_on('py-stevedore', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-warpx/package.py b/var/spack/repos/builtin/packages/py-warpx/package.py
index ba740442c0..e9ef28304d 100644
--- a/var/spack/repos/builtin/packages/py-warpx/package.py
+++ b/var/spack/repos/builtin/packages/py-warpx/package.py
@@ -18,7 +18,7 @@ class PyWarpx(PythonPackage):
"""
homepage = "https://ecp-warpx.github.io"
- url = "https://github.com/ECP-WarpX/WarpX/archive/refs/tags/21.04.tar.gz"
+ url = "https://github.com/ECP-WarpX/WarpX/archive/refs/tags/21.12.tar.gz"
git = "https://github.com/ECP-WarpX/WarpX.git"
maintainers = ['ax3l', 'dpgrote', 'RemiLehe']
@@ -27,6 +27,7 @@ class PyWarpx(PythonPackage):
# NOTE: if you update the versions here, also see warpx
version('develop', branch='development')
+ version('21.12', sha256='847c98aac20c73d94c823378803c82be9a14139f1c14ea483757229b452ce4c1')
version('21.11', sha256='ce60377771c732033a77351cd3500b24b5d14b54a5adc7a622767b9251c10d0b')
version('21.10', sha256='d372c573f0360094d5982d64eceeb0149d6620eb75e8fdbfdc6777f3328fb454')
version('21.09', sha256='861a65f11846541c803564db133c8678b9e8779e69902ef1637b21399d257eab')
@@ -39,18 +40,20 @@ class PyWarpx(PythonPackage):
variant('mpi', default=True,
description='Enable MPI support')
- for v in ['21.11', '21.10', '21.09', '21.08', '21.07', '21.06', '21.05',
- '21.04', 'develop']:
+ for v in ['21.12', '21.11', '21.10', '21.09', '21.08', '21.07', '21.06',
+ '21.05', '21.04', 'develop']:
depends_on('warpx@{0}'.format(v),
when='@{0}'.format(v),
type=['build', 'link'])
- depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('python@3.6:3.9', type=('build', 'run'))
depends_on('py-numpy@1.15.0:1', type=('build', 'run'))
- depends_on('py-mpi4py@2.0.0:', type=('build', 'run'), when='+mpi')
+ depends_on('py-mpi4py@2.1.0:', type=('build', 'run'), when='+mpi')
depends_on('py-periodictable@1.5:1', type=('build', 'run'))
- depends_on('py-picmistandard@0.0.14', type=('build', 'run'))
- depends_on('py-setuptools@38.6:', type='build')
+ depends_on('py-picmistandard@0.0.14', type=('build', 'run'), when='@21.03:21.11')
+ depends_on('py-picmistandard@0.0.16', type=('build', 'run'), when='@21.12')
+ depends_on('py-setuptools@42:', type='build')
+ depends_on('py-cmake@3.15:3', type='build')
depends_on('py-wheel', type='build')
depends_on('warpx +lib ~mpi +shared', type=('build', 'link'), when='~mpi')
depends_on('warpx +lib +mpi +shared', type=('build', 'link'), when='+mpi')
diff --git a/var/spack/repos/builtin/packages/py-wcwidth/package.py b/var/spack/repos/builtin/packages/py-wcwidth/package.py
index 67c05b27c4..56688f1835 100644
--- a/var/spack/repos/builtin/packages/py-wcwidth/package.py
+++ b/var/spack/repos/builtin/packages/py-wcwidth/package.py
@@ -11,6 +11,8 @@ class PyWcwidth(PythonPackage):
pypi = "wcwidth/wcwidth-0.1.7.tar.gz"
+ version('0.2.5', sha256='c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83')
version('0.1.7', sha256='3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e')
depends_on('py-setuptools', type='build')
+ depends_on('py-backports-functools-lru-cache@1.2.1:', when='@0.2.5: ^python@:3.1')
diff --git a/var/spack/repos/builtin/packages/py-websocket-client/package.py b/var/spack/repos/builtin/packages/py-websocket-client/package.py
index 3cdef8b4e9..6c26c7b4b0 100644
--- a/var/spack/repos/builtin/packages/py-websocket-client/package.py
+++ b/var/spack/repos/builtin/packages/py-websocket-client/package.py
@@ -8,14 +8,19 @@ class PyWebsocketClient(PythonPackage):
"""WebSocket client for Python. hybi13 is supported."""
homepage = "https://github.com/websocket-client/websocket-client.git"
- pypi = "websocket_client/websocket_client-0.57.0.tar.gz"
+ pypi = "websocket-client/websocket-client-0.57.0.tar.gz"
- version('0.57.0', sha256='d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010')
- version('0.56.0', sha256='1fd5520878b68b84b5748bb30e592b10d0a91529d5383f74f4964e72b297fd3a')
- version('0.48.0', sha256='18f1170e6a1b5463986739d9fd45c4308b0d025c1b2f9b88788d8f69e8a5eb4a')
+ version('1.2.1', sha256='8dfb715d8a992f5712fff8c843adae94e22b22a99b2c5e6b0ec4a1a981cc4e0d')
+ version('0.57.0', sha256='d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010',
+ url='https://files.pythonhosted.org/packages/source/w/websocket_client/websocket_client-0.57.0.tar.gz')
+ version('0.56.0', sha256='1fd5520878b68b84b5748bb30e592b10d0a91529d5383f74f4964e72b297fd3a',
+ url='https://files.pythonhosted.org/packages/source/w/websocket_client/websocket_client-0.56.0.tar.gz')
+ version('0.48.0', sha256='18f1170e6a1b5463986739d9fd45c4308b0d025c1b2f9b88788d8f69e8a5eb4a',
+ url='https://files.pythonhosted.org/packages/source/w/websocket_client/websocket_client-0.48.0.tar.gz')
depends_on('python@2.6:2.8,3.4:', type=('build', 'run'))
+ depends_on('python@3.6:', type=('build', 'run'), when='@1.2.1:')
depends_on('py-setuptools', type='build')
- depends_on('py-six', type=('build', 'run'))
+ depends_on('py-six', type=('build', 'run'), when='@:1.2.0')
depends_on('py-backports-ssl-match-hostname', when='^python@2.6:2.7.9', type=('build', 'run'))
depends_on('py-argparse', when='^python@:2.6', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-werkzeug/package.py b/var/spack/repos/builtin/packages/py-werkzeug/package.py
index 8d4f3c0e84..8d54551bc9 100644
--- a/var/spack/repos/builtin/packages/py-werkzeug/package.py
+++ b/var/spack/repos/builtin/packages/py-werkzeug/package.py
@@ -12,6 +12,7 @@ class PyWerkzeug(PythonPackage):
homepage = "http://werkzeug.pocoo.org"
pypi = "Werkzeug/Werkzeug-0.16.0.tar.gz"
+ version('2.0.2', sha256='aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a')
version('0.16.0', sha256='7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7')
version('0.15.6', sha256='0a24d43be6a7dce81bae05292356176d6c46d63e42a0dd3f9504b210a9cfaa43')
version('0.15.5', sha256='a13b74dd3c45f758d4ebdb224be8f1ab8ef58b3c0ffc1783a8c7d9f4f50227e6')
@@ -23,5 +24,7 @@ class PyWerkzeug(PythonPackage):
version('0.11.15', sha256='455d7798ac263266dbd38d4841f7534dd35ca9c3da4a8df303f8488f38f3bcc0')
version('0.11.11', sha256='e72c46bc14405cba7a26bd2ce28df734471bc9016bc8b4cb69466c2c14c2f7e5')
+ depends_on('python@3.6:', when='@2:', type=('build', 'run'))
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
+ depends_on('py-dataclasses', when='@2: ^python@:3.6', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-wheel/package.py b/var/spack/repos/builtin/packages/py-wheel/package.py
index a5737d5b92..f90ac695c0 100644
--- a/var/spack/repos/builtin/packages/py-wheel/package.py
+++ b/var/spack/repos/builtin/packages/py-wheel/package.py
@@ -12,6 +12,7 @@ class PyWheel(PythonPackage):
homepage = "https://github.com/pypa/wheel"
pypi = "wheel/wheel-0.34.2.tar.gz"
+ version('0.37.0', sha256='e2ef7239991699e3355d54f8e968a21bb940a1dbf34a4d226741e64462516fad')
version('0.36.2', sha256='e11eefd162658ea59a60a0f6c7d493a7190ea4b9a85e335b33489d9f17e0245e')
version('0.35.1', sha256='99a22d87add3f634ff917310a3d87e499f19e663413a52eb9232c447aa646c9f')
version('0.34.2', sha256='8788e9155fe14f54164c1b9eb0a319d98ef02c160725587ad60f14ddc57b6f96')
diff --git a/var/spack/repos/builtin/packages/py-wrapt/package.py b/var/spack/repos/builtin/packages/py-wrapt/package.py
index 6d2aec630e..48f085ed08 100644
--- a/var/spack/repos/builtin/packages/py-wrapt/package.py
+++ b/var/spack/repos/builtin/packages/py-wrapt/package.py
@@ -12,7 +12,12 @@ class PyWrapt(PythonPackage):
homepage = "https://github.com/GrahamDumpleton/wrapt"
pypi = "wrapt/wrapt-1.11.2.tar.gz"
+ version('1.13.3', sha256='1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185')
version('1.12.1', sha256='b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7')
- version('1.11.2', sha256='565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1')
- version('1.11.1', sha256='4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533')
+ version('1.11.2', sha256='565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1')
+ version('1.11.1', sha256='4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533')
version('1.10.10', sha256='42160c91b77f1bc64a955890038e02f2f72986c01d462d53cb6cb039b995cdd9')
+
+ depends_on('python@2.7:2,3.5:', when='@1.13.1:', type=('build', 'run'))
+ depends_on('python@2.7:2,3.3:', when='@1.13:', type=('build', 'run'))
+ depends_on('py-setuptools@38.3:', when='@1.13:', type='build')
diff --git a/var/spack/repos/builtin/packages/py-wurlitzer/package.py b/var/spack/repos/builtin/packages/py-wurlitzer/package.py
new file mode 100644
index 0000000000..6924ce16b0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-wurlitzer/package.py
@@ -0,0 +1,23 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyWurlitzer(PythonPackage):
+ """Capture C-level stdout/stderr pipes in Python via os.dup2."""
+
+ pypi = 'wurlitzer/wurlitzer-3.0.2.tar.gz'
+
+ maintainers = ['sethrj']
+
+ version('3.0.2', sha256='36051ac530ddb461a86b6227c4b09d95f30a1d1043de2b4a592e97ae8a84fcdf')
+
+ depends_on('python+ctypes@3.5:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ # In some circumstances (unclear exactly what) Wurlitzer is unable to get
+ # stdout/stderr pointers from ctypes, so it falls back to trying to use
+ # cffi. If you encounter this, please add the dependency below.
+ # depends_on('py-cffi', type='run', when='...????')
diff --git a/var/spack/repos/builtin/packages/py-xrootdpyfs/package.py b/var/spack/repos/builtin/packages/py-xrootdpyfs/package.py
new file mode 100644
index 0000000000..1b9a635469
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-xrootdpyfs/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyXrootdpyfs(PythonPackage):
+ """XRootDPyFS is a PyFilesystem interface to XRootD."""
+
+ homepage = "http://github.com/inveniosoftware/xrootdpyfs/"
+ pypi = "xrootdpyfs/xrootdpyfs-0.2.2.tar.gz"
+
+ version('0.2.2', sha256='43698c260f3ec52320c6bfac8dd3e7c2be7d28e9e9f58edf4f916578114e82bf')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-fs@0.5.4:1', type=('build', 'run'))
+ depends_on('xrootd@4.8.4:4 +python', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-yarl/package.py b/var/spack/repos/builtin/packages/py-yarl/package.py
index ccd33ef75a..6208fbf766 100644
--- a/var/spack/repos/builtin/packages/py-yarl/package.py
+++ b/var/spack/repos/builtin/packages/py-yarl/package.py
@@ -10,19 +10,27 @@ class PyYarl(PythonPackage):
homepage = "https://github.com/aio-libs/yarl"
url = "https://github.com/aio-libs/yarl/archive/v1.4.2.tar.gz"
+ version('1.7.2', sha256='19b94c68e8eda5731f87d79e3c34967a11e69695965113c4724d2491f76ad461')
version('1.4.2', sha256='a400eb3f54f7596eeaba8100a8fa3d72135195423c52808dc54a43c6b100b192')
depends_on('python@3.5:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools@40:', type='build', when='@1.7.2:')
depends_on('py-cython', type='build')
depends_on('py-multidict@4.0:', type=('build', 'run'))
depends_on('py-idna@2.0:', type=('build', 'run'))
+ depends_on('py-typing-extensions@3.7.4:', type=('build', 'run'), when='@1.7.2: ^python@:3.7')
@run_before('build')
def fix_cython(self):
+ if self.spec.satisfies('@1.7.2:'):
+ pyxfile = 'yarl/_quoting_c'
+ else:
+ pyxfile = 'yarl/_quoting'
+
cython = self.spec['py-cython'].command
cython('-3',
'-o',
- 'yarl/_quoting.c',
- 'yarl/_quoting.pyx',
+ pyxfile + '.c',
+ pyxfile + '.pyx',
'-Iyarl')
diff --git a/var/spack/repos/builtin/packages/py-yq/package.py b/var/spack/repos/builtin/packages/py-yq/package.py
new file mode 100644
index 0000000000..08b6068e79
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-yq/package.py
@@ -0,0 +1,23 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyYq(PythonPackage):
+ """yq takes YAML input, converts it to JSON, and pipes it to jq"""
+
+ homepage = "https://github.com/kislyuk/yq"
+ pypi = "yq/yq-2.12.2.tar.gz"
+
+ maintainers = ['qwertos']
+
+ version('2.12.2', sha256='2f156d0724b61487ac8752ed4eaa702a5737b804d5afa46fa55866951cd106d2')
+
+ depends_on('py-setuptools', type=('build', 'run'))
+ depends_on('py-toml@0.10.0:', type=('build', 'run'))
+ depends_on('py-pyyaml@3.11:', type=('build', 'run'))
+ depends_on('py-argcomplete@1.8.1:', type=('build', 'run'))
+ depends_on('py-xmltodict@0.11.0:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-ytopt-autotune/package.py b/var/spack/repos/builtin/packages/py-ytopt-autotune/package.py
new file mode 100644
index 0000000000..19f90a2aa8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-ytopt-autotune/package.py
@@ -0,0 +1,24 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyYtoptAutotune(PythonPackage):
+ """Common interface for autotuning search space and method definition."""
+
+ maintainers = ['Kerilk', 'liuyangzhuan']
+
+ homepage = "https://github.com/ytopt-team/autotune"
+ url = "https://github.com/ytopt-team/autotune/archive/refs/tags/v1.1.0.tar.gz"
+ git = "https://github.com/ytopt-team/autotune.git"
+
+ version('master', branch='master')
+ version('1.1.0', sha256='5ee7fa6a1c83131c5ceba1537b25f00de84182e4d0e6ebd0fd6efa4e8aee1bc4')
+
+ patch('version.patch', when='@1.1.0')
+
+ depends_on('python@3:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-ytopt-autotune/version.patch b/var/spack/repos/builtin/packages/py-ytopt-autotune/version.patch
new file mode 100644
index 0000000000..2e00b9b245
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-ytopt-autotune/version.patch
@@ -0,0 +1,10 @@
+diff --git a/autotune/__init__.py b/autotune/__init__.py
+index 5636cf1..6c9488d 100644
+--- a/autotune/__init__.py
++++ b/autotune/__init__.py
+@@ -1,4 +1,4 @@
+-from autotune.__version__ import __version__, __version_suffix__
++from autotune.__version__ import __version__
+ name = 'autotune'
+ version = __version__
+
diff --git a/var/spack/repos/builtin/packages/py-ytopt/package.py b/var/spack/repos/builtin/packages/py-ytopt/package.py
index f5e33b8f18..85d260d35e 100644
--- a/var/spack/repos/builtin/packages/py-ytopt/package.py
+++ b/var/spack/repos/builtin/packages/py-ytopt/package.py
@@ -10,14 +10,22 @@ class PyYtopt(PythonPackage):
"""Ytopt package implements search using Random Forest (SuRF), an autotuning
search method developed within Y-Tune ECP project."""
- homepage = "https://xgitlab.cels.anl.gov/pbalapra/ytopt"
- url = "https://xgitlab.cels.anl.gov/pbalapra/ytopt/raw/release/dist/ytopt-0.1.0.tar.gz"
+ maintainers = ['Kerilk']
- version('0.1.0', sha256='c7081fe3585a5b7a25bcb84733cd2326b72de3bfc4f84d6ad110341f24c3e612')
+ homepage = "https://github.com/ytopt-team/ytopt"
+ url = "https://github.com/ytopt-team/ytopt/archive/refs/tags/v0.0.1.tar.gz"
- depends_on('py-scikit-learn', type=('build', 'run'))
- depends_on('py-scikit-optimize', type=('build', 'run'))
+ version('0.0.2', sha256='5a624aa678b976ff6ef867610bafcb0dfd5c8af0d880138ca5d56d3f776e6d71')
+ version('0.0.1', sha256='3ca616922c8e76e73f695a5ddea5dd91b0103eada726185f008343cc5cbd7744')
- def build_args(self, spec, prefix):
- args = []
- return args
+ depends_on('python@3.6:', type=('build', 'run'))
+ depends_on('py-scikit-learn@0.23.1', type=('build', 'run'))
+ depends_on('py-dh-scikit-optimize', type=('build', 'run'))
+ depends_on('py-configspace', type=('build', 'run'))
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-ytopt-autotune@1.1:', type=('build', 'run'))
+ depends_on('py-joblib', type=('build', 'run'))
+ depends_on('py-deap', type=('build', 'run'))
+ depends_on('py-tqdm', type=('build', 'run'))
+ depends_on('py-ray', type=('build', 'run'))
+ depends_on('py-mpi4py@3.0.0:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-zope-event/package.py b/var/spack/repos/builtin/packages/py-zope-event/package.py
index 405c2cf2d0..36dbd7bf85 100644
--- a/var/spack/repos/builtin/packages/py-zope-event/package.py
+++ b/var/spack/repos/builtin/packages/py-zope-event/package.py
@@ -12,6 +12,7 @@ class PyZopeEvent(PythonPackage):
homepage = "https://github.com/zopefoundation/zope.event"
pypi = "zope.event/zope.event-4.3.0.tar.gz"
+ version('4.5.0', sha256='5e76517f5b9b119acf37ca8819781db6c16ea433f7e2062c4afc2b6fbedb1330')
version('4.3.0', sha256='e0ecea24247a837c71c106b0341a7a997e3653da820d21ef6c08b32548f733e7')
- depends_on('py-setuptools', type='build')
+ depends_on('py-setuptools', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-zope-interface/package.py b/var/spack/repos/builtin/packages/py-zope-interface/package.py
index b46605ee62..8e20f5f006 100644
--- a/var/spack/repos/builtin/packages/py-zope-interface/package.py
+++ b/var/spack/repos/builtin/packages/py-zope-interface/package.py
@@ -15,6 +15,7 @@ class PyZopeInterface(PythonPackage):
homepage = "https://github.com/zopefoundation/zope.interface"
pypi = "zope.interface/zope.interface-4.5.0.tar.gz"
+ version('5.4.0', sha256='5dba5f530fec3f0988d83b78cc591b58c0b6eb8431a85edd1569a0539a8a5a0e')
version('5.1.0', sha256='40e4c42bd27ed3c11b2c983fecfb03356fae1209de10686d03c02c8696a1d90e')
version('4.5.0', sha256='57c38470d9f57e37afb460c399eb254e7193ac7fb8042bd09bdc001981a9c74c')
diff --git a/var/spack/repos/builtin/packages/pygmo/package.py b/var/spack/repos/builtin/packages/pygmo/package.py
new file mode 100644
index 0000000000..80c5abe893
--- /dev/null
+++ b/var/spack/repos/builtin/packages/pygmo/package.py
@@ -0,0 +1,35 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Pygmo(CMakePackage):
+ """Parallel Global Multiobjective Optimizer (and its Python alter ego
+ PyGMO) is a C++ / Python platform to perform parallel computations of
+ optimisation tasks (global and local) via the asynchronous generalized
+ island model."""
+
+ homepage = "https://esa.github.io/pygmo2/"
+ url = "https://github.com/esa/pygmo2/archive/v2.18.0.tar.gz"
+ git = "https://github.com/esa/pygmo2.git"
+
+ version('master', branch='master')
+ version('2.18.0', sha256='9f081cc973297894af09f713f889870ac452bfb32b471f9f7ba08a5e0bb9a125')
+
+ depends_on('pagmo2', type=('build', 'link'))
+ depends_on('mpi', type='build')
+ depends_on('py-pybind11@2.6.0:2.6.2', type='build')
+ depends_on('cmake@3.1:', type='build')
+
+ variant('shared', default=True, description='Build shared libraries')
+
+ def cmake_args(self):
+
+ args = [
+ self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
+ ]
+
+ return args
diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py
index 01d0d9a065..141d5c703a 100644
--- a/var/spack/repos/builtin/packages/python/package.py
+++ b/var/spack/repos/builtin/packages/python/package.py
@@ -27,14 +27,19 @@ class Python(AutotoolsPackage):
maintainers = ['adamjstewart', 'skosukhin']
+ version('3.10.1', sha256='b76117670e7c5064344b9c138e141a377e686b9063f3a8a620ff674fa8ec90d3')
+ version('3.10.0', sha256='c4e0cbad57c90690cb813fb4663ef670b4d0f587d8171e2c42bd4c9245bd2758')
+ version('3.9.9', sha256='2cc7b67c1f3f66c571acc42479cdf691d8ed6b47bee12c9b68430413a17a44ea', preferred=True)
+ version('3.9.8', sha256='7447fb8bb270942d620dd24faa7814b1383b61fa99029a240025fd81c1db8283')
version('3.9.7', sha256='a838d3f9360d157040142b715db34f0218e535333696a5569dc6f854604eb9d1')
version('3.9.6', sha256='d0a35182e19e416fc8eae25a3dcd4d02d4997333e4ad1f2eee6010aadc3fe866')
version('3.9.5', sha256='e0fbd5b6e1ee242524430dee3c91baf4cbbaba4a72dd1674b90fda87b713c7ab')
version('3.9.4', sha256='66c4de16daa74a825cf9da9ddae1fe020b72c3854b73b1762011cc33f9e4592f')
+ version('3.9.3', sha256='3afeb61a45b5a2e6f1c0f621bd8cf925a4ff406099fdb3d8c97b993a5f43d048')
version('3.9.2', sha256='7899e8a6f7946748830d66739f2d8f2b30214dad956e56b9ba216b3de5581519')
version('3.9.1', sha256='29cb91ba038346da0bd9ab84a0a55a845d872c341a4da6879f462e94c741f117')
version('3.9.0', sha256='df796b2dc8ef085edae2597a41c1c0a63625ebd92487adaef2fed22b567873e8')
- version('3.8.12', sha256='316aa33f3b7707d041e73f246efedb297a70898c4b91f127f66dc8d80c596f1a', preferred=True)
+ version('3.8.12', sha256='316aa33f3b7707d041e73f246efedb297a70898c4b91f127f66dc8d80c596f1a')
version('3.8.11', sha256='b77464ea80cec14581b86aeb7fb2ff02830e0abc7bcdc752b7b4bdfcd8f3e393')
version('3.8.10', sha256='b37ac74d2cbad2590e7cd0dd2b3826c29afe89a734090a87bf8c03c45066cb65')
version('3.8.9', sha256='9779ec1df000bf86914cdd40860b88da56c1e61db59d37784beca14a259ac9e9')
@@ -205,7 +210,8 @@ class Python(AutotoolsPackage):
patch('tkinter.patch', when='@:2.8,3.3:3.7 platform=darwin')
# Patch the setup script to deny that tcl/x11 exists rather than allowing
# autodetection of (possibly broken) system components
- patch('tkinter-3.8.patch', when='@3.8: ~tkinter')
+ patch('tkinter-3.8.patch', when='@3.8:3.9 ~tkinter')
+ patch('tkinter-3.10.patch', when='@3.10: ~tkinter')
# Ensure that distutils chooses correct compiler option for RPATH on cray:
patch('cray-rpath-2.3.patch', when='@2.3:3.0.1 platform=cray')
diff --git a/var/spack/repos/builtin/packages/python/tkinter-3.10.patch b/var/spack/repos/builtin/packages/python/tkinter-3.10.patch
new file mode 100644
index 0000000000..e06be826b2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/python/tkinter-3.10.patch
@@ -0,0 +1,11 @@
+--- a/setup.py 2021-12-06 12:23:39.000000000 -0600
++++ b/setup.py 2021-12-14 10:30:33.000000000 -0600
+@@ -2099,6 +2099,8 @@
+ #
+ # Detection stops at the first successful method.
+
++ return False
++
+ # Check for Tcl and Tk at the locations indicated by _TCLTK_INCLUDES
+ # and _TCLTK_LIBS environment variables.
+ if self.detect_tkinter_fromenv():
diff --git a/var/spack/repos/builtin/packages/qmcpack/package.py b/var/spack/repos/builtin/packages/qmcpack/package.py
index a7124fe946..4445c43f95 100644
--- a/var/spack/repos/builtin/packages/qmcpack/package.py
+++ b/var/spack/repos/builtin/packages/qmcpack/package.py
@@ -15,7 +15,7 @@ class Qmcpack(CMakePackage, CudaPackage):
# Package information
homepage = "https://www.qmcpack.org/"
git = "https://github.com/QMCPACK/qmcpack.git"
- maintainers = ['naromero77']
+ maintainers = ['ye-luo']
tags = ['ecp', 'ecp-apps']
# This download method is untrusted, and is not recommended by the
@@ -97,6 +97,12 @@ class Qmcpack(CMakePackage, CudaPackage):
conflicts('^openblas+ilp64',
msg='QMCPACK does not support OpenBLAS 64-bit integer variant')
+ conflicts('^openblas threads=none',
+ msg='QMCPACK does not support OpenBLAS without threading')
+
+ conflicts('^openblas threads=pthreads',
+ msg='QMCPACK does not support OpenBLAS with pthreads')
+
conflicts('cuda_arch=none',
when='+cuda',
msg='A value for cuda_arch must be specified. Add cuda_arch=XX')
@@ -210,6 +216,13 @@ class Qmcpack(CMakePackage, CudaPackage):
return targets
+ # QMCPACK prefers taking MPI compiler wrappers as CMake compilers.
+ def setup_build_environment(self, env):
+ spec = self.spec
+ if '+mpi' in spec:
+ env.set('CC', spec['mpi'].mpicc)
+ env.set('CXX', spec['mpi'].mpicxx)
+
def cmake_args(self):
spec = self.spec
args = []
@@ -347,20 +360,11 @@ class Qmcpack(CMakePackage, CudaPackage):
return args
- # QMCPACK needs custom install method for a couple of reasons:
- # Firstly, wee follow the recommendation on the Spack website
- # for defining the compilers variables to be the MPI compiler wrappers.
- # https://spack.readthedocs.io/en/latest/packaging_guide.html#compiler-wrappers
- #
+ # QMCPACK needs custom install method for the following reason:
# Note that 3.6.0 release and later has a functioning 'make install',
# but still does not install nexus, manual, etc. So, there is no compelling
# reason to use QMCPACK's built-in version at this time.
def install(self, spec, prefix):
- if '+mpi' in spec:
- env['CC'] = spec['mpi'].mpicc
- env['CXX'] = spec['mpi'].mpicxx
- env['F77'] = spec['mpi'].mpif77
- env['FC'] = spec['mpi'].mpifc
# create top-level directory
mkdirp(prefix)
diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py
index 2b96ddde3d..0073b1db56 100644
--- a/var/spack/repos/builtin/packages/qt/package.py
+++ b/var/spack/repos/builtin/packages/qt/package.py
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
import os
+import platform
import sys
import llnl.util.tty as tty
@@ -12,6 +13,7 @@ from spack import *
from spack.operating_systems.mac_os import macos_version
MACOS_VERSION = macos_version() if sys.platform == 'darwin' else None
+LINUX_VERSION = Version(platform.release()) if platform.system() == 'Linux' else None
class Qt(Package):
@@ -191,10 +193,25 @@ class Qt(Package):
depends_on("flex", type='build')
depends_on("bison", type='build')
depends_on("gperf")
- depends_on("python@2.7.5:2", type='build')
+
+ # qtwebengine@5.7:5.15 are based on Google Chromium versions which depend on Py2
+ with when('@5.7:5.15'):
+ depends_on('python@2.7.5:2', type='build')
+ # mesa inherits MesonPackage (since October 2020) which depends on Py@3.
+ # The conflicts('mesa') enables a regular build of `qt@5.7:5.15+webkit`
+ # without having to specify the exact version by causing the concretizer
+ # to select mesa18 which does not depend on python@3.
+ conflicts('mesa')
+
+ with when('@5.10:'):
+ depends_on('nss@3.62:')
with when('@5.7:'):
- depends_on("nss")
+ # https://www.linuxfromscratch.org/blfs/view/svn/x/qtwebengine.html
+ depends_on('ninja', type='build')
+
+ # https://doc.qt.io/qt-5.15/qtwebengine-platform-notes.html
+ with when('@5.7: platform=linux'):
depends_on("libdrm")
depends_on("libxcomposite")
depends_on("libxcursor")
@@ -637,6 +654,20 @@ class Qt(Package):
config_args.append('-I{0}/include'.format(spec['libx11'].prefix))
config_args.append('-I{0}/include'.format(spec['xproto'].prefix))
+ # If the version of glibc is new enough Qt will configure features that
+ # may not be supported by the kernel version on the system. This will
+ # cause errors like:
+ # error while loading shared libraries: libQt5Core.so.5: cannot open
+ # shared object file: No such file or directory
+ # Test the kernel version and disable features that Qt detects in glibc
+ # but that are not supported in the kernel as determined by information
+ # in: qtbase/src/corelib/global/minimum-linux_p.h.
+ if LINUX_VERSION and version >= Version('5.10'):
+ if LINUX_VERSION < Version('3.16'):
+ config_args.append('-no-feature-renameat2')
+ if LINUX_VERSION < Version('3.17'):
+ config_args.append('-no-feature-getentropy')
+
if '~webkit' in spec:
config_args.extend([
'-skip',
diff --git a/var/spack/repos/builtin/packages/quantum-espresso/configure_aocc.patch b/var/spack/repos/builtin/packages/quantum-espresso/configure_aocc.patch
index fcfae27854..c6bea9a18b 100644
--- a/var/spack/repos/builtin/packages/quantum-espresso/configure_aocc.patch
+++ b/var/spack/repos/builtin/packages/quantum-espresso/configure_aocc.patch
@@ -2,14 +2,14 @@ diff --git a/install/configure b/install/configure_aocc
index 66337d1..d2c04af 100755
--- a/install/configure
+++ b/install/configure_aocc
-@@ -3203,6 +3203,7 @@ case "$arch" in
+@@ -3199,6 +3199,7 @@
+ ifort_version=`$mpif90 -V 2>&1 | grep "Intel(R)"`
+ pgf_version=`$mpif90 -V 2>&1 | grep "^pgf"`
+ nvfortran_version=`$mpif90 -V 2>&1 | grep "^nvfortran"`
++ aoccflang_version=`$mpif90 -v 2>&1 | grep "AMD clang version"`
+ gfortran_version=`$mpif90 -v 2>&1 | grep "gcc version"`
nagfor_version=`$mpif90 -v 2>&1 | grep "NAG Fortran"`
xlf_version=`$mpif90 -v 2>&1 | grep "xlf"`
- armflang_version=`$mpif90 -v 2>&1 | grep "Arm C/C++/Fortran Compiler version"`
-+ aoccflang_version=`$mpif90 -v 2>&1 | grep "AMD clang version"`
- #
- if test "$ifort_version" != ""
- then
@@ -3215,6 +3216,12 @@ case "$arch" in
version=`echo $nvfortran_version | cut -d ' ' -f2`
echo "${ECHO_T}nvfortran $version"
diff --git a/var/spack/repos/builtin/packages/quantum-espresso/package.py b/var/spack/repos/builtin/packages/quantum-espresso/package.py
index 3753504c1e..f2dcd3b666 100644
--- a/var/spack/repos/builtin/packages/quantum-espresso/package.py
+++ b/var/spack/repos/builtin/packages/quantum-espresso/package.py
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-class QuantumEspresso(Package):
+class QuantumEspresso(CMakePackage):
"""Quantum ESPRESSO is an integrated suite of Open-Source computer codes
for electronic-structure calculations and materials modeling at the
nanoscale. It is based on density-functional theory, plane waves, and
@@ -15,9 +15,10 @@ class QuantumEspresso(Package):
url = 'https://gitlab.com/QEF/q-e/-/archive/qe-6.6/q-e-qe-6.6.tar.gz'
git = 'https://gitlab.com/QEF/q-e.git'
- maintainers = ['naromero77']
+ maintainers = ['ye-luo']
version('develop', branch='develop')
+ version('6.8', sha256='654855c69864de7ece5ef2f2c0dea2d32698fe51192a8646b1555b0c57e033b2')
version('6.7', sha256='fe0ce74ff736b10d2a20c9d59025c01f88f86b00d229c123b1791f1edd7b4315',
url='https://gitlab.com/QEF/q-e/-/archive/qe-6.7MaX-Release/q-e-qe-6.7MaX-Release.tar.gz'
)
@@ -47,22 +48,28 @@ class QuantumEspresso(Package):
destination='.'
)
- variant('mpi', default=True, description='Builds with mpi support')
- variant('openmp', default=False, description='Enables openMP support')
- variant('scalapack', default=True, description='Enables scalapack support')
- variant('elpa', default=False, description='Uses elpa as an eigenvalue solver')
+ variant('cmake', default=True, description='Builds via CMake')
+ with when('+cmake'):
+ depends_on("cmake@3.14.0:", type="build")
+ conflicts('@:6.7', msg='+cmake works since QE v6.8')
- # Support for HDF5 has been added starting in version 6.1.0 and is
- # still experimental, therefore we default to False for the variant
- variant(
- 'hdf5', default='none', description='Builds with HDF5 support',
- values=('parallel', 'serial', 'none'), multi=False
- )
+ variant('libxc', default=False, description='Uses libxc')
+ depends_on('libxc@5.1.2:', when='+libxc')
- # Enables building Electron-phonon Wannier 'epw.x' executable
- # http://epw.org.uk/Main/About
- variant('epw', default=False,
- description='Builds Electron-phonon Wannier executable')
+ # TODO
+ # variant(
+ # 'gpu', default='none', description='Builds with GPU support',
+ # values=('nvidia', 'none'), multi=False
+ # )
+
+ variant('openmp', default=False, description='Enables openMP support')
+ # Need OpenMP threaded FFTW and BLAS libraries when configured
+ # with OpenMP support
+ with when('+openmp'):
+ conflicts('^fftw~openmp')
+ conflicts('^amdfftw~openmp')
+ conflicts('^openblas threads=none')
+ conflicts('^openblas threads=pthreads')
# Apply upstream patches by default. Variant useful for 3rd party
# patches which are incompatible with upstream patches
@@ -70,51 +77,36 @@ class QuantumEspresso(Package):
desc = desc + 'to False for third party patches or plugins'
variant('patch', default=True, description=desc)
- # QMCPACK converter patch
- # https://github.com/QMCPACK/qmcpack/tree/develop/external_codes/quantum_espresso
- variant('qmcpack', default=False,
- description='Build QE-to-QMCPACK wave function converter')
+ variant('mpi', default=True, description='Builds with mpi support')
+ with when('+mpi'):
+ depends_on('mpi')
+ variant('scalapack', default=True, description='Enables scalapack support')
+
+ with when('+scalapack'):
+ depends_on('scalapack')
+ variant('elpa', default=False, description='Uses elpa as an eigenvalue solver')
+
+ with when('+elpa'):
+ # CMake builds only support elpa without openmp
+ depends_on('elpa~openmp', when='+cmake')
+ depends_on('elpa+openmp', when='+openmp~cmake')
+ depends_on('elpa~openmp', when='~openmp~cmake')
+ # Elpa is formally supported by @:5.4.0, but QE configure searches
+ # for it in the wrong folders (or tries to download it within
+ # the build directory). Instead of patching Elpa to provide the
+ # folder QE expects as a link, we issue a conflict here.
+ conflicts('@:5.4.0', msg='+elpa requires QE >= 6.0')
- variant('environ', default=False,
- description='Enables support for introducing environment effects '
- 'into atomistic first-principles simulations.'
- 'See http://quantum-environ.org/about.html')
+ # Support for HDF5 has been added starting in version 6.1.0 and is
+ # still experimental, therefore we default to False for the variant
+ variant(
+ 'hdf5', default='none', description='Builds with HDF5 support',
+ values=('parallel', 'serial', 'none'), multi=False
+ )
- # Dependencies
- depends_on('blas')
- depends_on('lapack')
- depends_on('fftw-api@3')
- depends_on('mpi', when='+mpi')
- depends_on('scalapack', when='+scalapack+mpi')
- depends_on('elpa+openmp', when='+elpa+openmp')
- depends_on('elpa~openmp', when='+elpa~openmp')
# Versions of HDF5 prior to 1.8.16 lead to QE runtime errors
depends_on('hdf5@1.8.16:+fortran+hl+mpi', when='hdf5=parallel')
depends_on('hdf5@1.8.16:+fortran+hl~mpi', when='hdf5=serial')
- depends_on('hdf5', when='+qmcpack')
- # TODO: enable building EPW when ~mpi
- depends_on('mpi', when='+epw')
-
- # CONFLICTS SECTION
- # Omitted for now due to concretizer bug
- # MKL with 64-bit integers not supported.
- # conflicts(
- # '^mkl+ilp64',
- # msg='Quantum ESPRESSO does not support MKL 64-bit integer variant'
- # )
-
- # We can't ask for scalapack or elpa if we don't want MPI
- conflicts(
- '+scalapack',
- when='~mpi',
- msg='scalapack is a parallel library and needs MPI support'
- )
-
- conflicts(
- '+elpa',
- when='~mpi',
- msg='elpa is a parallel library and needs MPI support'
- )
# HDF5 support introduced in 6.1.0, but the configure had some limitations.
# In recent tests (Oct 2019), GCC and Intel work with the HDF5 Spack
@@ -140,60 +132,82 @@ class QuantumEspresso(Package):
msg='parallel HDF5 requires MPI support'
)
- # Elpa is formally supported by @:5.4.0, but QE configure searches
- # for it in the wrong folders (or tries to download it within
- # the build directory). Instead of patching Elpa to provide the
- # folder QE expects as a link, we issue a conflict here.
- conflicts('+elpa', when='@:5.4.0')
+ # QMCPACK converter patch
+ # https://github.com/QMCPACK/qmcpack/tree/develop/external_codes/quantum_espresso
+ variant('qmcpack', default=False,
+ description='Build QE-to-QMCPACK wave function converter')
- # Some QMCPACK converters are incompatible with upstream patches.
- # HDF5 is a hard requirement. Need to do two HDF5 cases explicitly
- # since Spack lacks support for expressing NOT operation.
- conflicts(
- '@6.4+patch',
- when='+qmcpack',
- msg='QE-to-QMCPACK wave function converter requires '
- 'deactivatation of upstream patches'
- )
- conflicts(
- '@6.3:6.4.0 hdf5=serial',
- when='+qmcpack',
- msg='QE-to-QMCPACK wave function converter only '
- 'supported with parallel HDF5'
- )
- conflicts(
- 'hdf5=none',
- when='+qmcpack',
- msg='QE-to-QMCPACK wave function converter requires HDF5'
- )
+ with when('+qmcpack'):
+ # Some QMCPACK converters are incompatible with upstream patches.
+ # HDF5 is a hard requirement. Need to do two HDF5 cases explicitly
+ # since Spack lacks support for expressing NOT operation.
+ conflicts(
+ '@6.4+patch',
+ msg='QE-to-QMCPACK wave function converter requires '
+                'deactivation of upstream patches'
+ )
+ conflicts(
+ '@6.3:6.4.0 hdf5=serial',
+ msg='QE-to-QMCPACK wave function converter only '
+ 'supported with parallel HDF5'
+ )
+ conflicts(
+ 'hdf5=none',
+ msg='QE-to-QMCPACK wave function converter requires HDF5'
+ )
- # The first version of Q-E to feature integrated EPW is 6.0.0,
- # as per http://epw.org.uk/Main/DownloadAndInstall .
- # Complain if trying to install a version older than this.
- conflicts('+epw', when='@:5',
- msg='EPW only available from version 6.0.0 and on')
+ # Enables building Electron-phonon Wannier 'epw.x' executable
+ # http://epw.org.uk/Main/About
+ variant('epw', default=False,
+ description='Builds Electron-phonon Wannier executable')
+ conflicts('~epw', when='+cmake', msg='epw cannot be turned off when using CMake')
+
+ with when('+epw'):
+ # The first version of Q-E to feature integrated EPW is 6.0.0,
+ # as per http://epw.org.uk/Main/DownloadAndInstall .
+ # Complain if trying to install a version older than this.
+ conflicts('@:5', msg='EPW only available from version 6.0.0 and on')
+
+        # Below are some constraints as shown in the link above.
+ # Constraints may be relaxed as successful reports
+ # of different compiler+mpi combinations arrive
- # Below goes some constraints as shown in the link above.
- # Constraints may be relaxed as successful reports
- # of different compiler+mpi combinations arrive
+ # TODO: enable building EPW when ~mpi and ~cmake
+ conflicts('~mpi', when='~cmake', msg='EPW needs MPI when ~cmake')
- # TODO: enable building EPW when ~mpi
- conflicts('+epw', when='~mpi', msg='EPW needs MPI')
+        # EPW doesn't get along well with OpenMPI 2.x.x
+ conflicts('^openmpi@2.0.0:2',
+ msg='OpenMPI version incompatible with EPW')
- # EPW doesn't gets along well with OpenMPI 2.x.x
- conflicts('+epw', when='^openmpi@2.0.0:2',
- msg='OpenMPI version incompatible with EPW')
+        # EPW also doesn't get along well with PGI 17.x + OpenMPI 1.10.7
+ conflicts('^openmpi@1.10.7%pgi@17.0:17.12',
+ msg='PGI+OpenMPI version combo incompatible with EPW')
- # EPW also doesn't gets along well with PGI 17.x + OpenMPI 1.10.7
- conflicts('+epw', when='^openmpi@1.10.7%pgi@17.0:17.12',
- msg='PGI+OpenMPI version combo incompatible with EPW')
+ variant('environ', default=False,
+ description='Enables support for introducing environment effects '
+            'into atomistic first-principles simulations. '
+ 'See http://quantum-environ.org/about.html')
+ conflicts('+environ', when='+cmake', msg='environ doesn\'t work with CMake')
+
+ # Dependencies not affected by variants
+ depends_on('blas')
+ depends_on('lapack')
+ depends_on('fftw-api@3')
+
+ # CONFLICTS SECTION
+ # Omitted for now due to concretizer bug
+ # MKL with 64-bit integers not supported.
+ # conflicts(
+ # '^mkl+ilp64',
+ # msg='Quantum ESPRESSO does not support MKL 64-bit integer variant'
+ # )
# PATCHES SECTION
# THIRD-PARTY PATCHES
# NOTE: *SOME* third-party patches will require deactivation of
# upstream patches using `~patch` variant
- # QMCPACK converter patches for QE 6.7, 6.4.1, 6.4, and 6.3
+ # QMCPACK converter patches for QE 6.8, 6.7, 6.4.1, 6.4, and 6.3
conflicts('@:6.2,6.5:6.6', when='+qmcpack',
msg='QMCPACK converter NOT available for this version of QE')
@@ -204,18 +218,16 @@ class QuantumEspresso(Package):
conflicts('@6.5:', when='+environ',
msg='6.4.x is the latest QE series supported by Environ')
+ # 6.8
+ patch_url = 'https://raw.githubusercontent.com/QMCPACK/qmcpack/develop/external_codes/quantum_espresso/add_pw2qmcpack_to_qe-6.8.diff'
+ patch_checksum = '69f7fbd72aba810c35a0b034188e45bea8f9f11d3150c0715e1b3518d5c09248'
+ patch(patch_url, sha256=patch_checksum, when='@6.8+qmcpack')
+
# 6.7
patch_url = 'https://raw.githubusercontent.com/QMCPACK/qmcpack/develop/external_codes/quantum_espresso/add_pw2qmcpack_to_qe-6.7.0.diff'
patch_checksum = '72564c168231dd4a1279a74e76919af701d47cee9a851db6e205753004fe9bb5'
patch(patch_url, sha256=patch_checksum, when='@6.7+qmcpack')
- # Need OpenMP threaded FFTW and BLAS libraries when configured
- # with OpenMP support
- conflicts('^fftw~openmp', when='+openmp')
- conflicts('^amdfftw~openmp', when='+openmp')
- conflicts('^openblas threads=none', when='+openmp')
- conflicts('^openblas threads=pthreads', when='+openmp')
-
# 6.4.1
patch_url = 'https://raw.githubusercontent.com/QMCPACK/qmcpack/develop/external_codes/quantum_espresso/add_pw2qmcpack_to_qe-6.4.1.diff'
patch_checksum = '57cb1b06ee2653a87c3acc0dd4f09032fcf6ce6b8cbb9677ae9ceeb6a78f85e2'
@@ -279,7 +291,7 @@ class QuantumEspresso(Package):
when='+patch@6.4.1:6.5.0')
# Configure updated to work with AOCC compilers
- patch('configure_aocc.patch', when='@6.7 %aocc')
+ patch('configure_aocc.patch', when='@6.7:6.8 %aocc')
# Configure updated to work with NVIDIA compilers
patch('nvhpc.patch', when='@6.5 %nvhpc')
@@ -291,7 +303,41 @@ class QuantumEspresso(Package):
# extlibs_makefile updated to work with fujitsu compilers
patch('fj-fox.patch', when='+patch %fj')
+ def cmake_args(self):
+ spec = self.spec
+
+ cmake_args = [
+ self.define_from_variant('QE_ENABLE_MPI', 'mpi'),
+ self.define_from_variant('QE_ENABLE_OPENMP', 'openmp'),
+ self.define_from_variant('QE_ENABLE_SCALAPACK', 'scalapack'),
+ self.define_from_variant('QE_ENABLE_ELPA', 'elpa'),
+ self.define_from_variant('QE_ENABLE_LIBXC', 'libxc'),
+ ]
+
+ # QE prefers taking MPI compiler wrappers as CMake compilers.
+ if '+mpi' in spec:
+ cmake_args.append(self.define('CMAKE_C_COMPILER', spec['mpi'].mpicc))
+ cmake_args.append(self.define('CMAKE_Fortran_COMPILER', spec['mpi'].mpifc))
+
+ if not spec.satisfies('hdf5=none'):
+ cmake_args.append(self.define('QE_ENABLE_HDF5', True))
+
+ if '+qmcpack' in spec:
+ cmake_args.append(self.define('QE_ENABLE_PW2QMCPACK', True))
+
+ return cmake_args
+
+ @when("~cmake")
+ def cmake(self, spec, prefix):
+ print("Bypass cmake stage when building via configure")
+
+ @when("~cmake")
+ def build(self, spec, prefix):
+ print("Bypass build stage when building via configure")
+
+ @when("~cmake")
def install(self, spec, prefix):
+ print("Override install stage when building via configure")
prefix_path = prefix.bin if '@:5.4.0' in spec else prefix
options = ['-prefix={0}'.format(prefix_path)]
diff --git a/var/spack/repos/builtin/packages/r-affy/package.py b/var/spack/repos/builtin/packages/r-affy/package.py
index 47722b3496..c84cb7a18f 100644
--- a/var/spack/repos/builtin/packages/r-affy/package.py
+++ b/var/spack/repos/builtin/packages/r-affy/package.py
@@ -23,7 +23,7 @@ class RAffy(RPackage):
version('1.56.0', commit='d36a7b8f05b1ef60162d94e75037d45c48f88871')
version('1.54.0', commit='a815f02906fcf491b28ed0a356d6fce95a6bd20e')
- depends_on('r@2.8.0:', type=('build', 'run'))
+ depends_on('r@2.8.0:4.0', type=('build', 'run'), when='@:1.68.0')
depends_on('r-biocgenerics@0.1.12:', type=('build', 'run'))
depends_on('r-biobase@2.5.5:', type=('build', 'run'))
depends_on('r-affyio@1.13.3:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-backports/package.py b/var/spack/repos/builtin/packages/r-backports/package.py
index 1d9c9de415..a564d85d46 100644
--- a/var/spack/repos/builtin/packages/r-backports/package.py
+++ b/var/spack/repos/builtin/packages/r-backports/package.py
@@ -16,10 +16,10 @@ class RBackports(RPackage):
functions or arguments by selectively importing specific backports to
support older installations."""
- homepage = "https://cloud.r-project.org/package=backports"
- url = "https://cloud.r-project.org/src/contrib/backports_1.1.1.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/backports"
+ homepage = "https://github.com/r-lib/backports"
+ cran = "backports"
+ version('1.4.0', sha256='e7611565d24a852ad8b08579a7c67ad9121c1bda148bade98c7bec686e8dabbf')
version('1.2.1', sha256='a2834bbd57e305e5d8010322f1906ea1789b3b5ba5eca77c5ff4248aceb7c2d5')
version('1.1.4', sha256='ee4b5efef22fa7ef27d7983ffcd31db52f81e1fbb7189c6e89ee09b69349ff03')
version('1.1.3', sha256='e41bd146824ec921994f1b176d0e4cca0b36dd3db32ca7a954d872a5ba214cc1')
diff --git a/var/spack/repos/builtin/packages/r-bh/package.py b/var/spack/repos/builtin/packages/r-bh/package.py
index 9c3e58a69e..f0ce70be0a 100644
--- a/var/spack/repos/builtin/packages/r-bh/package.py
+++ b/var/spack/repos/builtin/packages/r-bh/package.py
@@ -7,7 +7,9 @@ from spack import *
class RBh(RPackage):
- """Boost provides free peer-reviewed portable C++ source libraries. A large
+ """Boost C++ Header Files.
+
+ Boost provides free peer-reviewed portable C++ source libraries. A large
part of Boost is provided as C++ template code which is resolved entirely
at compile-time without linking. This package aims to provide the most
useful subset of Boost libraries for template use among CRAN package. By
@@ -23,9 +25,9 @@ class RBh(RPackage):
'tuple' 'type_trains' 'typeof' 'unordered' 'utility' 'uuid'."""
homepage = "https://cloud.r-project.org/package=BH"
- url = "https://cloud.r-project.org/src/contrib/BH_1.65.0-1.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/BH"
+ cran = "BH"
+ version('1.75.0-0', sha256='ae4c10992607dd697663f60675a46a5770851da159330bb63c4a68890bdd6f5a')
version('1.72.0-3', sha256='888ec1a3316bb69e1ba749b08ba7e0903ebc4742e3a185de8d148c13cddac8ab')
version('1.69.0-1', sha256='a0fd4364b7e368f09c56dec030823f52c16da0787580af7e4615eddeb99baca2')
version('1.65.0-1', sha256='82baa78afe8f1edc3c7e84e1c9924321047e14c1e990df9b848407baf3f7cb58')
diff --git a/var/spack/repos/builtin/packages/r-blob/package.py b/var/spack/repos/builtin/packages/r-blob/package.py
index a56c0a5642..2248d05dac 100644
--- a/var/spack/repos/builtin/packages/r-blob/package.py
+++ b/var/spack/repos/builtin/packages/r-blob/package.py
@@ -14,10 +14,10 @@ class RBlob(RPackage):
package provides the blob object, a list of raw vectors, suitable
for use as a column in data frame."""
- homepage = "https://cloud.r-project.org/package=blob"
- url = "https://cloud.r-project.org/src/contrib/blob_1.1.0.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/blob"
+ homepage = "https://blob.tidyverse.org"
+ cran = "blob"
+ version('1.2.2', sha256='4976053c65994c769a4c22b4553bea0bd9c623b3b991dbaf023d2a164770c7fa')
version('1.2.1', sha256='ef54bc7a9646c1b73f4d2f60c869b4f1940bc3505874175114297ad7772d8bea')
version('1.2.0', sha256='1af1cfa28607bc0e2f1f01598a00a7d5d1385ef160a9e79e568f30f56538e023')
version('1.1.0', sha256='16d6603df3ddba177f0ac4d9469c938f89131c4bf8834345db838defd9ffea16')
diff --git a/var/spack/repos/builtin/packages/r-brio/package.py b/var/spack/repos/builtin/packages/r-brio/package.py
index 4042b85592..6bef8729a6 100644
--- a/var/spack/repos/builtin/packages/r-brio/package.py
+++ b/var/spack/repos/builtin/packages/r-brio/package.py
@@ -14,7 +14,7 @@ class RBrio(RPackage):
explicit control over line endings."""
homepage = "https://github.com/r-lib/brio"
- url = "https://cloud.r-project.org/src/contrib/brio_1.1.0.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/brio"
+ cran = "brio"
+ version('1.1.3', sha256='eaa89041856189bee545bf1c42c7920a0bb0f1f70bb477487c467ee3e8fedcc6')
version('1.1.0', sha256='6bb3a3b47bea13f1a1e3dcdc8b9f688502643e4b40a481a34aa04a261aabea38')
diff --git a/var/spack/repos/builtin/packages/r-brms/package.py b/var/spack/repos/builtin/packages/r-brms/package.py
index 4a26556d94..3fefc3cf5c 100644
--- a/var/spack/repos/builtin/packages/r-brms/package.py
+++ b/var/spack/repos/builtin/packages/r-brms/package.py
@@ -28,6 +28,7 @@ class RBrms(RPackage):
homepage = "https://github.com/paul-buerkner/brms"
cran = "brms"
+ version('2.16.3', sha256='68302b10b5264f72d163d01c17792c002306cf37f0ee778dcec4c7e118f923e1')
version('2.16.1', sha256='749efbd9fb061fe207cf2e729c1387d9a8538b922f12ceec4e82a9f8dd9c1bc4')
version('2.15.0', sha256='c11701d1d8758590b74bb845b568b736e4455a81b114c7dfde0b27b7bd1bcc2f')
@@ -42,7 +43,7 @@ class RBrms(RPackage):
depends_on('r-rstantools@2.1.1:', type=('build', 'run'))
depends_on('r-bayesplot@1.5.0:', type=('build', 'run'))
depends_on('r-shinystan@2.4.0:', type=('build', 'run'))
- depends_on('r-projpred@2.0.0:', type=('build', 'run'))
+ depends_on('r-projpred@2.0.0:', when='@:2.16.1', type=('build', 'run'))
depends_on('r-bridgesampling@0.3-0:', type=('build', 'run'))
depends_on('r-glue@1.3.0:', type=('build', 'run'))
depends_on('r-future@1.19.0:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-car/package.py b/var/spack/repos/builtin/packages/r-car/package.py
index 71c4fcaad0..8df0b79598 100644
--- a/var/spack/repos/builtin/packages/r-car/package.py
+++ b/var/spack/repos/builtin/packages/r-car/package.py
@@ -15,6 +15,7 @@ class RCar(RPackage):
homepage = "https://r-forge.r-project.org/projects/car/"
cran = "car"
+ version('3.0-12', sha256='b899a6efae3842a90a2349d381dbcf4b4ed36bd03108ebe7380e81120e457302')
version('3.0-11', sha256='b32c927206f515631ff276dbb337b0f22e9b2d851f4abb1d2c272e534c19542c')
version('3.0-10', sha256='1ce316d2fee9b47c951d25d096be732489a3c9f6fc9e612a1eca2e50fb5925f1')
version('3.0-3', sha256='fa807cb12f6e7fb38ec534cac4eef54747945c2119a7d51155a2492ad778c36f')
@@ -32,7 +33,7 @@ class RCar(RPackage):
depends_on('r-pbkrtest@0.4-4:', type=('build', 'run'))
depends_on('r-quantreg', type=('build', 'run'))
depends_on('r-maptools', when='@3.0:', type=('build', 'run'))
- depends_on('r-rio', when='@3.0:', type=('build', 'run'))
+ depends_on('r-rio', when='@3.0:3.0-11', type=('build', 'run'))
depends_on('r-lme4@1.1-27.1:', when='@3.0-11:', type=('build', 'run'))
depends_on('r-lme4', when='@3.0:', type=('build', 'run'))
depends_on('r-nlme', when='@3.0:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-cli/package.py b/var/spack/repos/builtin/packages/r-cli/package.py
index c85a832bae..36f7ec07e3 100644
--- a/var/spack/repos/builtin/packages/r-cli/package.py
+++ b/var/spack/repos/builtin/packages/r-cli/package.py
@@ -19,6 +19,7 @@ class RCli(RPackage):
homepage = "https://github.com/r-lib/cli"
cran = "cli"
+ version('3.1.0', sha256='c70a61830bf706a84c59eb74a809978846cee93742198ab4192742a5df1ace11')
version('3.0.1', sha256='d89a25b6cd760e157605676e104ce65473a7d8d64c289efdd9640e949968b4fd')
version('2.2.0', sha256='39a77af61724f8cc1f5117011e17bb2a488cbac61a7c112db078a675d3ac40b8')
version('2.0.2', sha256='490834e5b80eb036befa0e150996bcab1c4d5d168c3d45209926e52d0d5413b6')
diff --git a/var/spack/repos/builtin/packages/r-colorspace/package.py b/var/spack/repos/builtin/packages/r-colorspace/package.py
index fe71033bda..d7765acb6d 100644
--- a/var/spack/repos/builtin/packages/r-colorspace/package.py
+++ b/var/spack/repos/builtin/packages/r-colorspace/package.py
@@ -25,10 +25,10 @@ class RColorspace(RPackage):
scientific paper: Zeileis et al. (2020, Journal of Statistical Software,
<doi:10.18637/jss.v096.i01>)."""
- homepage = "https://cloud.r-project.org/package=colorspace"
- url = "https://cloud.r-project.org/src/contrib/colorspace_1.3-2.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/colorspace"
+ homepage = "https://colorspace.R-Forge.R-project.org"
+ cran = "colorspace"
+ version('2.0-2', sha256='b891cd2ec129ed5f116429345947bcaadc33969758a108521eb0cf36bd12183a')
version('2.0-0', sha256='4e6a53af9918db282cefdc71eaa30f507d4d1d682bcfb74cb0dd68a0b282018e')
version('1.4-1', sha256='693d713a050f8bfecdb7322739f04b40d99b55aed168803686e43401d5f0d673')
version('1.4-0', sha256='ce003c5958dd704697959e9dc8a108c8cb568f8d78ece113235732afc5dff556')
diff --git a/var/spack/repos/builtin/packages/r-colourpicker/package.py b/var/spack/repos/builtin/packages/r-colourpicker/package.py
index 97620f0d7c..9d312c97db 100644
--- a/var/spack/repos/builtin/packages/r-colourpicker/package.py
+++ b/var/spack/repos/builtin/packages/r-colourpicker/package.py
@@ -19,6 +19,7 @@ class RColourpicker(RPackage):
homepage = "https://github.com/daattali/colourpicker"
cran = "colourpicker"
+ version('1.1.1', sha256='a0d09982b048b143e2c3438ccec039dd20d6f892fa0dedc9fdcb0d40de883ce0')
version('1.1.0', sha256='2dfbb6262d187d3b17357ff9c22670ced3621feda5b2a2a500558478e4d551e2')
depends_on('r@3.1.0:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-conquer/package.py b/var/spack/repos/builtin/packages/r-conquer/package.py
index 9051a0de80..2ed8466cc7 100644
--- a/var/spack/repos/builtin/packages/r-conquer/package.py
+++ b/var/spack/repos/builtin/packages/r-conquer/package.py
@@ -15,13 +15,14 @@ class RConquer(RPackage):
using multiplier bootstrap."""
homepage = "https://github.com/XiaoouPan/conquer"
- url = "https://cloud.r-project.org/src/contrib/conquer_1.0.2.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/conquer"
+ cran = "conquer"
+ version('1.2.1', sha256='1354f90f962a2124e155227cdc0ed2c6e54682f1e08934c49a827e51dc112f45')
version('1.0.2', sha256='542f6154ce1ffec0c1b4dd4e1f5b86545015f4b378c4c66a0840c65c57d674ff')
depends_on('r@3.5.0:', type=('build', 'run'))
depends_on('r-rcpp@1.0.3:', type=('build', 'run'))
depends_on('r-matrix', type=('build', 'run'))
depends_on('r-matrixstats', type=('build', 'run'))
+ depends_on('r-caret', when='@1.2:', type=('build', 'run'))
depends_on('r-rcpparmadillo@0.9.850.1.0:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-cpp11/package.py b/var/spack/repos/builtin/packages/r-cpp11/package.py
index 4f8aec55d0..eeed5e4a6d 100644
--- a/var/spack/repos/builtin/packages/r-cpp11/package.py
+++ b/var/spack/repos/builtin/packages/r-cpp11/package.py
@@ -17,5 +17,6 @@ class RCpp11(RPackage):
homepage = "https://github.com/r-lib/cpp11"
cran = "cpp11"
+ version('0.4.2', sha256='403ce0bf82358d237176053b0fb1e958cb6bfa4d0fb3555bf5801db6a6939b99')
version('0.4.0', sha256='1768fd07dc30dfbbf8f3fb1a1183947cb7e1dfd909165c4d612a63c163a41e87')
version('0.2.5', sha256='6fef9306c0c3043252c987e77c99ef679b2ea46dffafae318dbeb38ad21a2e20')
diff --git a/var/spack/repos/builtin/packages/r-crayon/package.py b/var/spack/repos/builtin/packages/r-crayon/package.py
index bd7cd38642..9324f7c43b 100644
--- a/var/spack/repos/builtin/packages/r-crayon/package.py
+++ b/var/spack/repos/builtin/packages/r-crayon/package.py
@@ -16,6 +16,7 @@ class RCrayon(RPackage):
homepage = "https://github.com/r-lib/crayon#readme"
cran = "crayon"
+ version('1.4.2', sha256='ee34397f643e76e30588068d4c93bd3c9afd2193deacccacb3bffcadf141b857')
version('1.4.1', sha256='08b6e42e748d096960b2f32b7ffe690c25742e29fe14c19d1834cd6ff43029c7')
version('1.3.4', sha256='fc6e9bf990e9532c4fcf1a3d2ce22d8cf12d25a95e4779adfa17713ed836fa68')
version('1.3.2', sha256='9a6b75d63c05fe64baf222f1921330ceb727924bcc5fc2753ff0528d42555e68')
diff --git a/var/spack/repos/builtin/packages/r-crosstalk/package.py b/var/spack/repos/builtin/packages/r-crosstalk/package.py
index ad3abb0342..befa2325e9 100644
--- a/var/spack/repos/builtin/packages/r-crosstalk/package.py
+++ b/var/spack/repos/builtin/packages/r-crosstalk/package.py
@@ -13,10 +13,10 @@ class RCrosstalk(RPackage):
other, with Shiny or without (i.e. static .html files). Currently supports
linked brushing and filtering."""
- homepage = "https://cloud.r-project.org/package=crosstalk"
- url = "https://cloud.r-project.org/src/contrib/crosstalk_1.0.0.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/crosstalk"
+ homepage = "https://rstudio.github.io/crosstalk"
+ cran = "crosstalk"
+ version('1.2.0', sha256='4237baab35cd246a8a98fb9cf4ce53b6ddbc31d00742ded4edea0479613d1ea0')
version('1.1.0.1', sha256='36a70b10bc11826e314c05f9579fd791b9ac3b3a2cfed4d4ca74ce1ad991300e')
version('1.0.0', sha256='b31eada24cac26f24c9763d9a8cbe0adfd87b264cf57f8725027fe0c7742ca51')
diff --git a/var/spack/repos/builtin/packages/r-dbi/package.py b/var/spack/repos/builtin/packages/r-dbi/package.py
index e62a09ed2d..013546ea30 100644
--- a/var/spack/repos/builtin/packages/r-dbi/package.py
+++ b/var/spack/repos/builtin/packages/r-dbi/package.py
@@ -7,14 +7,16 @@ from spack import *
class RDbi(RPackage):
- """A database interface definition for communication between R and
+ """R Database Interface.
+
+ A database interface definition for communication between R and
relational database management systems. All classes in this package are
virtual and need to be extended by the various R/DBMS implementations."""
- homepage = "http://rstats-db.github.io/DBI"
- url = "https://cloud.r-project.org/src/contrib/DBI_0.7.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/DBI"
+ homepage = "https://dbi.r-dbi.org"
+ cran = "DBI"
+ version('1.1.1', sha256='572ab3b8a6421d0ac3e7665c4c842826f1723af98fca25d4f43edb419e771344')
version('1.1.0', sha256='a96db7fa39a58f1ed34c6e78d8f5f7e4cf0882afb301323b5c6975d6729203e4')
version('1.0.0', sha256='ff16f118eb3f759183441835e932b87358dd80ab9800ce576a8f3df1b6f01cf5')
version('0.4-1', sha256='eff14a9af4975f23f8e1f4347d82c33c32c0b4f4f3e11370c582a89aeb8ac68e')
diff --git a/var/spack/repos/builtin/packages/r-desc/package.py b/var/spack/repos/builtin/packages/r-desc/package.py
index da96469f53..23e0ed8e17 100644
--- a/var/spack/repos/builtin/packages/r-desc/package.py
+++ b/var/spack/repos/builtin/packages/r-desc/package.py
@@ -7,16 +7,19 @@ from spack import *
class RDesc(RPackage):
- """desc: Manipulate DESCRIPTION Files"""
+ """Manipulate DESCRIPTION Files.
- homepage = "https://cloud.r-project.org/package=desc"
- url = "https://cloud.r-project.org/src/contrib/desc_1.2.0.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/desc/"
+ Tools to read, write, create, and manipulate DESCRIPTION files. It is
+ intended for packages that create or manipulate other packages."""
+ homepage = "https://github.com/r-lib/desc"
+ cran = "desc"
+
+ version('1.4.0', sha256='8220e4c706449b8121b822e70b1414f391ef419aed574836a234c63b83e5d649')
version('1.2.0', sha256='e66fb5d4fc7974bc558abcdc107a1f258c9177a29dcfcf9164bc6b33dd08dae8')
depends_on('r@3.1.0:', type=('build', 'run'))
- depends_on('r-assertthat', type=('build', 'run'))
depends_on('r-r6', type=('build', 'run'))
depends_on('r-crayon', type=('build', 'run'))
depends_on('r-rprojroot', type=('build', 'run'))
+ depends_on('r-assertthat', when='@:1.2', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-diffobj/package.py b/var/spack/repos/builtin/packages/r-diffobj/package.py
index 6445949014..024895a5a6 100644
--- a/var/spack/repos/builtin/packages/r-diffobj/package.py
+++ b/var/spack/repos/builtin/packages/r-diffobj/package.py
@@ -13,9 +13,9 @@ class RDiffobj(RPackage):
of their differences."""
homepage = "https://github.com/brodieG/diffobj"
- url = "https://cloud.r-project.org/src/contrib/diffobj_0.3.3.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/diffobj"
+ cran = "diffobj"
+ version('0.3.5', sha256='d860a79b1d4c9e369282d7391b539fe89228954854a65ba47181407c53e3cf60')
version('0.3.3', sha256='414e5573470b9565b9149a0a61c7e8344fb37f889d23dc4e131acc8aa62e6df4')
depends_on('r@3.1.0:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-dt/package.py b/var/spack/repos/builtin/packages/r-dt/package.py
index a0fc8655a6..7543f7fb5a 100644
--- a/var/spack/repos/builtin/packages/r-dt/package.py
+++ b/var/spack/repos/builtin/packages/r-dt/package.py
@@ -15,9 +15,9 @@ class RDt(RPackage):
abbreviation of 'DataTables'."""
homepage = "https://rstudio.github.io/DT"
- url = "https://cloud.r-project.org/src/contrib/DT_0.1.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/DT"
+ cran = "DT"
+ version('0.20', sha256='c66d7f49ec101fdbb91c6d26c06fb1373f9ebdefe29fe99f2ae1a641220aba9f')
version('0.17', sha256='e3430292421dcc2b6ad5f2deda729f0603da4eb31f86d071833e6e11abf3fb56')
version('0.13', sha256='79a073fe96980ce150d790ab76133c9e80bd463270c34d149c03934a622d63b5')
version('0.8', sha256='90195054148806cf31c7db5c41f72d5389c75adc0b1183606a9babd2c6ae8e21')
@@ -33,4 +33,5 @@ class RDt(RPackage):
depends_on('r-jsonlite@0.9.16:', when='@0.8:', type=('build', 'run'))
depends_on('r-magrittr', type=('build', 'run'))
depends_on('r-crosstalk', type=('build', 'run'))
+ depends_on('r-jquerylib', when='@0.19:', type=('build', 'run'))
depends_on('r-promises', when='@0.5:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-emmeans/package.py b/var/spack/repos/builtin/packages/r-emmeans/package.py
index 8543001a23..745ae434a2 100644
--- a/var/spack/repos/builtin/packages/r-emmeans/package.py
+++ b/var/spack/repos/builtin/packages/r-emmeans/package.py
@@ -21,6 +21,7 @@ class REmmeans(RPackage):
homepage = "https://github.com/rvlenth/emmeans"
cran = "emmeans"
+ version('1.7.1-1', sha256='6b01eaad1ea0f96245db8563cc77929a3c3b96cd61c24ce1d452308d6e0250de')
version('1.7.0', sha256='d4b654896197dfda8354b33257380a66ee06117d6177b1ed7f1e42176525e9c5')
version('1.6.0', sha256='201bb7b008dde94231ed60bcc6a32749442faaab4baeea99ad28b97c951b3c1e')
diff --git a/var/spack/repos/builtin/packages/r-htmltools/package.py b/var/spack/repos/builtin/packages/r-htmltools/package.py
index ce3ac0dd20..2d3b82e8bd 100644
--- a/var/spack/repos/builtin/packages/r-htmltools/package.py
+++ b/var/spack/repos/builtin/packages/r-htmltools/package.py
@@ -14,6 +14,7 @@ class RHtmltools(RPackage):
homepage = "https://github.com/rstudio/htmltools"
cran = "htmltools"
+ version('0.5.2', sha256='7dc7d50436e5a82a5801f85bcd2f572a06a98b4027d71aa17b4854ec9b2767fb')
version('0.5.1.1', sha256='f0bfe72ffe330f3d6c9ead5857f3a4aef80e002e32558074a3e643f2ab67a4ba')
version('0.5.1', sha256='6ac82e4451f9558ceb541ea659a736b2ab3245827832b44d3661e7a4d91f6307')
version('0.3.6', sha256='44affb82f9c2fd76c9e2b58f9229adb003217932b68c3fdbf1327c8d74c868a2')
@@ -22,5 +23,7 @@ class RHtmltools(RPackage):
depends_on('r@2.14.1:', type=('build', 'run'))
depends_on('r-digest', type=('build', 'run'))
depends_on('r-base64enc', when='@0.5.1:', type=('build', 'run'))
+ depends_on('r-rlang@0.4.10:', when='@0.5.2:', type=('build', 'run'))
depends_on('r-rlang', when='@0.5.1:', type=('build', 'run'))
+ depends_on('r-fastmap', when='@0.5.2:', type=('build', 'run'))
depends_on('r-rcpp', when=' @:0.3.6', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-jquerylib/package.py b/var/spack/repos/builtin/packages/r-jquerylib/package.py
new file mode 100644
index 0000000000..e6c97d2d16
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-jquerylib/package.py
@@ -0,0 +1,16 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class RJquerylib(RPackage):
+ """Obtain 'jQuery' as an HTML Dependency Object."""
+
+ cran = "jquerylib"
+
+ version('0.1.4', sha256='f0bcc11dcde3a6ff180277e45c24642d3da3c8690900e38f44495efbc9064411')
+
+ depends_on('r-htmltools', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-packrat/package.py b/var/spack/repos/builtin/packages/r-packrat/package.py
index 4a897ab431..1b22874f9b 100644
--- a/var/spack/repos/builtin/packages/r-packrat/package.py
+++ b/var/spack/repos/builtin/packages/r-packrat/package.py
@@ -11,9 +11,9 @@ class RPackrat(RPackage):
and reproducible way."""
homepage = "https://github.com/rstudio/packrat/"
- url = "https://cloud.r-project.org/src/contrib/packrat_0.4.7-1.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/packrat"
+ cran = "packrat"
+ version('0.7.0', sha256='e8bce1fd78f28f3a7bf56e65a2ae2c6802e69bf55466c24e1d1a4b8a5f83dcc2')
version('0.5.0', sha256='d6a09290fbe037a6c740921c5dcd70b500e5b36e4713eae4010adf0c456bc5f7')
version('0.4.9-3', sha256='87299938a751defc54eb00a029aecd3522d6349d900aaa8b3e1aa6bf31e98234')
version('0.4.8-1', sha256='a283caf4fda419e6571ae9ca6210a59002a030721feb8a50c0d0787fd6f672f3')
diff --git a/var/spack/repos/builtin/packages/r-prettydoc/package.py b/var/spack/repos/builtin/packages/r-prettydoc/package.py
index ade056fc5b..1769d30d3a 100644
--- a/var/spack/repos/builtin/packages/r-prettydoc/package.py
+++ b/var/spack/repos/builtin/packages/r-prettydoc/package.py
@@ -19,5 +19,5 @@ class RPrettydoc(RPackage):
version('0.4.1', sha256='1094a69b026238d149435472b4f41c75151c7370a1be6c6332147c88ad4c4829')
- depends_on('r-markdown@1.17:', type=('build', 'run'))
+ depends_on('r-rmarkdown@1.17:', type=('build', 'run'))
depends_on('pandoc@1.12.3:', type='build')
diff --git a/var/spack/repos/builtin/packages/r-r6/package.py b/var/spack/repos/builtin/packages/r-r6/package.py
index 8445c1a792..549f2f2c2a 100644
--- a/var/spack/repos/builtin/packages/r-r6/package.py
+++ b/var/spack/repos/builtin/packages/r-r6/package.py
@@ -17,9 +17,9 @@ class RR6(RPackage):
classes are defined in different packages."""
homepage = "https://github.com/wch/R6/"
- url = "https://cloud.r-project.org/src/contrib/R6_2.2.2.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/R6"
+ cran = "R6"
+ version('2.5.1', sha256='8d92bd29c2ed7bf15f2778618ffe4a95556193d21d8431a7f75e7e5fc102bf48')
version('2.5.0', sha256='aec1af9626ec532cb883b544bf9eff4cb2d89c343c7ce0fa31761ec5a7882e02')
version('2.4.0', sha256='70be110174fbf5f5304049b186a6f9c05b77bfaec6d8caf980fcef5da6e0abce')
version('2.2.2', sha256='087756f471884c3b3ead80215a7cc5636a78b8a956e91675acfe2896426eae8f')
diff --git a/var/spack/repos/builtin/packages/r-rcpparmadillo/package.py b/var/spack/repos/builtin/packages/r-rcpparmadillo/package.py
index 3482664fa2..a0d88b0b63 100644
--- a/var/spack/repos/builtin/packages/r-rcpparmadillo/package.py
+++ b/var/spack/repos/builtin/packages/r-rcpparmadillo/package.py
@@ -22,9 +22,9 @@ class RRcpparmadillo(RPackage):
that"""
homepage = "https://cloud.r-project.org/package=RcppArmadillo"
- url = "https://cloud.r-project.org/src/contrib/RcppArmadillo_0.8.100.1.0.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/RcppArmadillo"
+ cran = "RcppArmadillo"
+ version('0.10.7.3.0', sha256='3710b767708e3b9408723eedb98391daa8651fda53a2c6b033273265512f6262')
version('0.10.1.2.2', sha256='38323703fcf2b61f46f2984aafdd3ddf17c3c993d1d27a8f0f4ba5012b99d069')
version('0.9.600.4.0', sha256='2057b7aa965a4c821dd734276d8e6a01cd59a1b52536b65cb815fa7e8c114f1e')
version('0.9.400.3.0', sha256='56936d501fe8e6f8796ae1a6badb9294d7dad98a0b557c3b3ce6bd4ecaad13b0')
diff --git a/var/spack/repos/builtin/packages/r-rio/package.py b/var/spack/repos/builtin/packages/r-rio/package.py
index 4c3fbe88e0..c3054a44a4 100644
--- a/var/spack/repos/builtin/packages/r-rio/package.py
+++ b/var/spack/repos/builtin/packages/r-rio/package.py
@@ -7,7 +7,9 @@ from spack import *
class RRio(RPackage):
- """Streamlined data import and export by making assumptions that the user
+ """A Swiss-Army Knife for Data I/O.
+
+ Streamlined data import and export by making assumptions that the user
is probably willing to make: 'import()' and 'export()' determine the data
structure from the file extension, reasonable defaults are used for data
import and export (e.g., 'stringsAsFactors=FALSE'), web-based import is
@@ -17,13 +19,14 @@ class RRio(RPackage):
provides a simple method for converting between file types."""
homepage = "https://github.com/leeper/rio"
- url = "https://cloud.r-project.org/src/contrib/rio_0.5.16.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/rio"
+ cran = "rio"
+ version('0.5.29', sha256='9fa63187e1814053e6ed2a164665b4924e08c3453adccb78f7211d403dcc5412')
version('0.5.16', sha256='d3eb8d5a11e0a3d26169bb9d08f834a51a6516a349854250629072d59c29d465')
depends_on('r@2.15.0:', type=('build', 'run'))
depends_on('r-foreign', type=('build', 'run'))
+ depends_on('r-haven@1.1.2:', when='@0.5.26:', type=('build', 'run'))
depends_on('r-haven@1.1.0:', type=('build', 'run'))
depends_on('r-curl@0.6:', type=('build', 'run'))
depends_on('r-data-table@1.9.8:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-rmarkdown/package.py b/var/spack/repos/builtin/packages/r-rmarkdown/package.py
index 7d27f56806..14160e2b31 100644
--- a/var/spack/repos/builtin/packages/r-rmarkdown/package.py
+++ b/var/spack/repos/builtin/packages/r-rmarkdown/package.py
@@ -15,6 +15,7 @@ class RRmarkdown(RPackage):
homepage = "https://rmarkdown.rstudio.com/"
cran = "rmarkdown"
+ version('2.11', sha256='9371255300e7ea4cd936978ad2ca3d205d8605e09f4913cb0d4725005a7a9775')
version('2.9', sha256='6ce5af8b9a7c282619f74d3999d27ec4de12d3f93cde8fd12cc4c19f02ea8668')
version('2.6', sha256='e6e799c472de11e079bc752cca4b4dbd6803650649457bb6ae836cb1edcdf6b0')
version('1.14', sha256='f636b1048c5be56e06aa0b2b4342ad5c8192734f1e9b27468fef62be672edc61')
@@ -34,6 +35,7 @@ class RRmarkdown(RPackage):
depends_on('r-xfun', when='@1.13:', type=('build', 'run'))
depends_on('r-xfun@0.15:', when='@2.6:', type=('build', 'run'))
depends_on('r-xfun@0.21:', when='@2.8:', type=('build', 'run'))
+ depends_on('r-jquerylib', when='@2.11:', type=('build', 'run'))
depends_on('r-stringr@1.2.0:', when='@1.6:', type=('build', 'run'))
depends_on('r-rprojroot', when='@1.3:1.7', type=('build', 'run'))
depends_on('r-mime', when='@1.8:1.14', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-rpostgresql/package.py b/var/spack/repos/builtin/packages/r-rpostgresql/package.py
index 437748d353..a7fd42e594 100644
--- a/var/spack/repos/builtin/packages/r-rpostgresql/package.py
+++ b/var/spack/repos/builtin/packages/r-rpostgresql/package.py
@@ -27,11 +27,3 @@ class RRpostgresql(RPackage):
depends_on('r@2.9.0:', type=('build', 'run'))
depends_on('r-dbi@0.3:', type=('build', 'run'))
depends_on('postgresql')
-
- depends_on('automake', type='build')
-
- patch_config_files = True
-
- @run_before('install')
- def patch_config_guess(self):
- AutotoolsPackage._do_patch_config_files(self)
diff --git a/var/spack/repos/builtin/packages/r-rsconnect/package.py b/var/spack/repos/builtin/packages/r-rsconnect/package.py
index e25a4593c5..115e91bd56 100644
--- a/var/spack/repos/builtin/packages/r-rsconnect/package.py
+++ b/var/spack/repos/builtin/packages/r-rsconnect/package.py
@@ -16,6 +16,7 @@ class RRsconnect(RPackage):
homepage = "https://github.com/rstudio/rsconnect"
cran = "rsconnect"
+ version('0.8.25', sha256='3c055277f745f2ca37a73e2f425249307cea4dc95ecc59fbe05ee8b6cf26d9cf')
version('0.8.17', sha256='64767a4d626395b7871375956a9f0455c3d64ff6e779633b0e554921d85da231')
depends_on('r@3.0.0:', type=('build', 'run'))
@@ -23,6 +24,7 @@ class RRsconnect(RPackage):
depends_on('r-digest', type=('build', 'run'))
depends_on('r-jsonlite', type=('build', 'run'))
depends_on('r-openssl', type=('build', 'run'))
+ depends_on('r-packrat@0.6:', when='@0.8.18:', type=('build', 'run'))
depends_on('r-packrat@0.5:', type=('build', 'run'))
depends_on('r-rstudioapi@0.5:', type=('build', 'run'))
depends_on('r-yaml@2.1.5:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-tictoc/package.py b/var/spack/repos/builtin/packages/r-tictoc/package.py
index 0b56b4d08a..b080c68383 100644
--- a/var/spack/repos/builtin/packages/r-tictoc/package.py
+++ b/var/spack/repos/builtin/packages/r-tictoc/package.py
@@ -7,8 +7,8 @@ from spack import *
class RTictoc(RPackage):
- """tictoc: Functions for timing R scripts, as well as implementations of
- Stack and List structures
+ """Functions for timing R scripts, as well as implementations of Stack and
+ List structures.
This package provides the timing functions 'tic' and 'toc'
that can be nested. One can record all timings while a
@@ -20,9 +20,10 @@ class RTictoc(RPackage):
'push', 'pop', 'first', 'last' and 'clear'."""
homepage = "https://collectivemedia.github.io/tictoc/"
- url = "https://cloud.r-project.org/src/contrib/tictoc_1.0.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/tictoc"
+ cran = "tictoc"
+ version('1.0.1', sha256='a09a1535c417ddf6637bbbda5fca6edab6c7f7b252a64e57e99d4d0748712705')
version('1.0', sha256='47da097c1822caa2d8e262381987cfa556ad901131eb96109752742526b2e2fe')
- depends_on('r@3.0.3:', type=('build', 'run'))
+ depends_on('r@3.0.3:', type=('build', 'run'), when='@1.0.1:')
+ depends_on('r@3.0.3:4.0', type=('build', 'run'), when='@1.0')
diff --git a/var/spack/repos/builtin/packages/r-tidyverse/package.py b/var/spack/repos/builtin/packages/r-tidyverse/package.py
index b09b4b19f4..268b6d6fe8 100644
--- a/var/spack/repos/builtin/packages/r-tidyverse/package.py
+++ b/var/spack/repos/builtin/packages/r-tidyverse/package.py
@@ -16,60 +16,89 @@ class RTidyverse(RPackage):
<https://tidyverse.org>."""
homepage = "https://tidyverse.tidyverse.org/"
- url = "https://cloud.r-project.org/src/contrib/tidyverse_1.2.1.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/tidyverse"
+ cran = "tidyverse"
+ version('1.3.1', sha256='83cf95109d4606236274f5a8ec2693855bf75d3a1b3bc1ab4426dcc275ed6632')
version('1.3.0', sha256='6d8acb81e994f9bef5e4dcf908bcea3786d108adcf982628235b6c8c80f6fe09')
version('1.2.1', sha256='ad67a27bb4e89417a15338fe1a40251a7b5dedba60e9b72637963d3de574c37b')
- depends_on('r+X', type=('build', 'run'))
+ depends_on('r@3.3:', when='@1.3.1:', type=('build', 'run'))
depends_on('r@3.2:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-broom@0.4.2:', type=('build', 'run'))
+ depends_on('r+X', type=('build', 'run'))
+
+ depends_on('r-broom@0.7.6:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-broom@0.5.2:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-cli@1.0.0:', type=('build', 'run'))
+ depends_on('r-broom@0.4.2:', type=('build', 'run'))
+ depends_on('r-cli@2.4.0:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-cli@1.1.0:', when='@1.3.0:', type=('build', 'run'))
+ depends_on('r-cli@1.0.0:', type=('build', 'run'))
+ depends_on('r-crayon@1.4.1:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-crayon@1.3.4:', type=('build', 'run'))
- depends_on('r-dbplyr@1.1.0:', type=('build', 'run'))
+ depends_on('r-dbplyr@2.1.1:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-dbplyr@1.4.2:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-dplyr@0.7.4:', type=('build', 'run'))
+ depends_on('r-dbplyr@1.1.0:', type=('build', 'run'))
+ depends_on('r-dplyr@1.0.5:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-dplyr@0.8.3:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-forcats@0.2.0:', type=('build', 'run'))
+ depends_on('r-dplyr@0.7.4:', type=('build', 'run'))
+ depends_on('r-dtplyr@1.1.0:', when='@1.3.1:', type=('build', 'run'))
+ depends_on('r-forcats@0.5.1:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-forcats@0.4.0:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-ggplot2@2.2.1:', type=('build', 'run'))
+ depends_on('r-forcats@0.2.0:', type=('build', 'run'))
+ depends_on('r-googledrive@1.0.1:', when='@1.3.1:', type=('build', 'run'))
+ depends_on('r-googlesheets4@0.3.0:', when='@1.3.1:', type=('build', 'run'))
+ depends_on('r-ggplot2@3.3.3:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-ggplot2@3.2.1:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-haven@1.1.0:', type=('build', 'run'))
+ depends_on('r-ggplot2@2.2.1:', type=('build', 'run'))
+ depends_on('r-haven@2.3.1:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-haven@2.2.0:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-hms@0.3:', type=('build', 'run'))
+ depends_on('r-haven@1.1.0:', type=('build', 'run'))
+ depends_on('r-hms@1.0.0:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-hms@0.5.2:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-httr@1.3.1:', type=('build', 'run'))
+ depends_on('r-hms@0.3:', type=('build', 'run'))
+ depends_on('r-httr@1.4.2:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-httr@1.4.1:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-jsonlite@1.5:', type=('build', 'run'))
+ depends_on('r-httr@1.3.1:', type=('build', 'run'))
+ depends_on('r-jsonlite@1.7.2:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-jsonlite@1.6:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-lubridate@1.7.1:', type=('build', 'run'))
+ depends_on('r-jsonlite@1.5:', type=('build', 'run'))
+ depends_on('r-lubridate@1.7.10:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-lubridate@1.7.4:', when='@1.3.0:', type=('build', 'run'))
+ depends_on('r-lubridate@1.7.1:', type=('build', 'run'))
+ depends_on('r-magrittr@2.0.1:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-magrittr@1.5:', type=('build', 'run'))
- depends_on('r-modelr@0.1.1:', type=('build', 'run'))
+ depends_on('r-modelr@0.1.8:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-modelr@0.1.5:', when='@1.3.0:', type=('build', 'run'))
+ depends_on('r-modelr@0.1.1:', type=('build', 'run'))
+ depends_on('r-pillar@1.6.0:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-pillar@1.4.2:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-purrr@0.2.4:', type=('build', 'run'))
+ depends_on('r-purrr@0.3.4:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-purrr@0.3.3:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-readr@1.1.1:', type=('build', 'run'))
+ depends_on('r-purrr@0.2.4:', type=('build', 'run'))
+ depends_on('r-readr@1.4.0:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-readr@1.3.1:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-readxl@1.0.0:', type=('build', 'run'))
+ depends_on('r-readr@1.1.1:', type=('build', 'run'))
depends_on('r-readxl@1.3.1:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-reprex@0.1.1:', type=('build', 'run'))
+ depends_on('r-readxl@1.0.0:', type=('build', 'run'))
+ depends_on('r-reprex@2.0.0:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-reprex@0.3.0:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-rlang@0.1.4:', type=('build', 'run'))
+ depends_on('r-reprex@0.1.1:', type=('build', 'run'))
+ depends_on('r-rlang@0.4.10:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-rlang@0.4.1:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-rstudioapi@0.7:', type=('build', 'run'))
+ depends_on('r-rlang@0.1.4:', type=('build', 'run'))
+ depends_on('r-rstudioapi@0.13:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-rstudioapi@0.10:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-rvest@0.3.2:', type=('build', 'run'))
+ depends_on('r-rstudioapi@0.7:', type=('build', 'run'))
+ depends_on('r-rvest@1.0.0:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-rvest@0.3.5:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-stringr@1.2.0:', type=('build', 'run'))
+ depends_on('r-rvest@0.3.2:', type=('build', 'run'))
depends_on('r-stringr@1.4.0:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-tibble@1.3.4:', type=('build', 'run'))
+ depends_on('r-stringr@1.2.0:', type=('build', 'run'))
+ depends_on('r-tibble@3.1.0:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-tibble@2.1.3:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-tidyr@0.7.2:', type=('build', 'run'))
+ depends_on('r-tibble@1.3.4:', type=('build', 'run'))
+ depends_on('r-tidyr@1.1.3:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-tidyr@1.0.0:', when='@1.3.0:', type=('build', 'run'))
- depends_on('r-xml2@1.1.1:', type=('build', 'run'))
+ depends_on('r-tidyr@0.7.2:', type=('build', 'run'))
+ depends_on('r-xml2@1.3.2:', when='@1.3.1:', type=('build', 'run'))
depends_on('r-xml2@1.2.2:', when='@1.3.0:', type=('build', 'run'))
+ depends_on('r-xml2@1.1.1:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-v8/package.py b/var/spack/repos/builtin/packages/r-v8/package.py
index c57a192f61..f1926109bb 100644
--- a/var/spack/repos/builtin/packages/r-v8/package.py
+++ b/var/spack/repos/builtin/packages/r-v8/package.py
@@ -10,16 +10,18 @@ class RV8(RPackage):
"""V8: Embedded JavaScript and WebAssembly Engine for R"""
homepage = "https://github.com/jeroen/v8"
- url = "https://cloud.r-project.org/src/contrib/V8_3.4.0.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/V8"
+ cran = "V8"
- version('3.4.0', sha256='f5c8a2a03cc1be9f504f47711a0fcd1b962745139c9fb2a10fbd79c4ae103fbd')
+ version('3.6.0', sha256='a3969898bf4a7c13d3130fae0d385cd048d46372ff4a412917b914b159261377')
+ version('3.4.0', sha256='f5c8a2a03cc1be9f504f47711a0fcd1b962745139c9fb2a10fbd79c4ae103fbd',
+ deprecated=True)
- depends_on('r-curl@1.0:', type=('build', 'run'))
- depends_on('r-jsonlite@1.0:', type=('build', 'run'))
depends_on('r-rcpp@0.12.12:', type=('build', 'run'))
+ depends_on('r-jsonlite@1.0:', type=('build', 'run'))
+ depends_on('r-curl@1.0:', type=('build', 'run'))
conflicts('@3.4.0', when='target=aarch64:')
+ conflicts('@3.4.0', when='%gcc@5:')
def setup_build_environment(self, env):
spec = self.spec
diff --git a/var/spack/repos/builtin/packages/r-viridislite/package.py b/var/spack/repos/builtin/packages/r-viridislite/package.py
index 352d17d971..6125075cfd 100644
--- a/var/spack/repos/builtin/packages/r-viridislite/package.py
+++ b/var/spack/repos/builtin/packages/r-viridislite/package.py
@@ -10,9 +10,9 @@ class RViridislite(RPackage):
"""viridisLite: Default Color Maps from 'matplotlib' (Lite Version)"""
homepage = "https://github.com/sjmgarnier/viridisLite"
- url = "https://cloud.r-project.org/src/contrib/viridisLite_0.2.0.tar.gz"
- list_url = "https://cloud.r-project.org/src/contrib/Archive/viridisLite"
+ cran = "viridisLite"
+ version('0.4.0', sha256='849955dc8ad9bc52bdc50ed4867fd92a510696fc8294e6971efa018437c83c6a')
version('0.3.0', sha256='780ea12e7c4024d5ba9029f3a107321c74b8d6d9165262f6e64b79e00aa0c2af')
version('0.2.0', sha256='2d4d909f21c51e720bd685f05041ba158294e0a4064e0946d0bd916709818694')
diff --git a/var/spack/repos/builtin/packages/r-vroom/package.py b/var/spack/repos/builtin/packages/r-vroom/package.py
index ad2645bdf5..9019157988 100644
--- a/var/spack/repos/builtin/packages/r-vroom/package.py
+++ b/var/spack/repos/builtin/packages/r-vroom/package.py
@@ -18,6 +18,7 @@ class RVroom(RPackage):
homepage = "https://github.com/r-lib/vroom"
cran = "vroom"
+ version('1.5.7', sha256='d087cb148f71c222fc89199d03df2502689149873414a6d89c2f006d3a109fde')
version('1.5.5', sha256='1d45688c08f162a3300eda532d9e87d144f4bc686769a521bf9a12e3d3b465fe')
depends_on('r@3.1:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/racket/package.py b/var/spack/repos/builtin/packages/racket/package.py
new file mode 100644
index 0000000000..0b5e9d9a5f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/racket/package.py
@@ -0,0 +1,67 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Racket(Package):
+ """The Racket programming language."""
+
+ homepage = "https://www.racket-lang.org"
+
+ maintainers = ['arjunguha', 'elfprince13']
+
+ version('8.3', '3b963cd29ae119e1acc2c6dc4781bd9f25027979589caaae3fdfc021aac2324b')
+
+ depends_on('libffi', type=('build', 'link', 'run'))
+ depends_on('patchutils')
+ depends_on('libtool', type=('build'))
+
+ phases = ['configure', 'build', 'install']
+
+ def url_for_version(self, version):
+ return "https://mirror.racket-lang.org/installers/{0}/racket-minimal-{0}-src-builtpkgs.tgz".format(version)
+
+ variant('cs', default=True, description='Build Racket CS (new ChezScheme VM)')
+ variant('bc', default=False, description='Build Racket BC (old MZScheme VM)')
+ variant('shared', default=True, description="Enable shared")
+ variant('jit', default=True, description="Just-in-Time Compilation")
+
+ parallel = False
+ extendable = True
+
+ def toggle(self, spec, variant):
+ toggle_text = ("enable" if spec.variants[variant].value else "disable")
+ return "--{0}-{1}".format(toggle_text, variant)
+
+ def configure(self, spec, prefix):
+ with working_dir('src'):
+ configure = Executable("./configure")
+ configure_args = [self.toggle(spec, 'cs'),
+ self.toggle(spec, 'bc'),
+ self.toggle(spec, 'jit')]
+ toggle_shared = self.toggle(spec, 'shared')
+ if sys.platform == 'darwin':
+ configure_args += ["--enable-macprefix"]
+ if "+xonx" in spec:
+ configure_args += ["--enable-xonx", toggle_shared]
+ else:
+ configure_args += [toggle_shared]
+ configure_args += ["--prefix={0}".format(prefix)]
+ configure(*configure_args)
+
+ def build(self, spec, prefix):
+ with working_dir('src'):
+ if spec.variants["bc"].value:
+ make("bc")
+ if spec.variants["cs"].value:
+ make("cs")
+
+ def install(self, spec, prefix):
+ with working_dir('src'):
+ if spec.variants["bc"].value:
+ make('install-bc')
+ if spec.variants["cs"].value:
+ make('install-cs')
diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py
index dc2a2440cb..fd76843ec8 100644
--- a/var/spack/repos/builtin/packages/raja/package.py
+++ b/var/spack/repos/builtin/packages/raja/package.py
@@ -58,6 +58,8 @@ class Raja(CachedCMakePackage, CudaPackage, ROCmPackage):
depends_on('camp@0.2.2', when='@0.14.0:')
depends_on('camp@0.1.0', when='@0.12.0:0.13.0')
+ depends_on('cmake@:3.20', when='+rocm', type='build')
+
with when('+rocm @0.12.0:'):
depends_on('camp+rocm')
for arch in ROCmPackage.amdgpu_targets:
@@ -126,10 +128,16 @@ class Raja(CachedCMakePackage, CudaPackage, ROCmPackage):
entries = []
entries.append(cmake_cache_path("BLT_SOURCE_DIR", spec['blt'].prefix))
- entries.append(cmake_cache_path("camp_DIR", spec['camp'].prefix))
+ if 'camp' in self.spec:
+ entries.append(cmake_cache_path("camp_DIR", spec['camp'].prefix))
entries.append(cmake_cache_option("BUILD_SHARED_LIBS", '+shared' in spec))
entries.append(cmake_cache_option("ENABLE_EXAMPLES", '+examples' in spec))
- entries.append(cmake_cache_option("ENABLE_EXERCISES", '+exercises' in spec))
+ if spec.satisfies('@0.14.0:'):
+ entries.append(cmake_cache_option("RAJA_ENABLE_EXERCISES",
+ '+exercises' in spec))
+ else:
+ entries.append(cmake_cache_option("ENABLE_EXERCISES",
+ '+exercises' in spec))
# Work around spack adding -march=ppc64le to SPACK_TARGET_ARGS which
# is used by the spack compiler wrapper. This can go away when BLT
diff --git a/var/spack/repos/builtin/packages/random123/package.py b/var/spack/repos/builtin/packages/random123/package.py
index 7e9a898d23..6c527e0bc0 100644
--- a/var/spack/repos/builtin/packages/random123/package.py
+++ b/var/spack/repos/builtin/packages/random123/package.py
@@ -13,11 +13,15 @@ class Random123(Package):
conventional approach of using N iterations of a stateful
transformation."""
homepage = "https://www.deshawresearch.com/resources_random123.html"
- url = "https://www.deshawresearch.com/downloads/download_random123.cgi/Random123-1.09.tar.gz"
+ url = "https://github.com/DEShawResearch/random123/archive/refs/tags/v1.14.0.tar.gz"
- version('1.13.2', sha256='74a1c6bb66b2684f03d3b1008642a2e9141909103cd09f428d2c60bcaa51cb40')
- version('1.10', sha256='4afdfba4b941e33e23b5de9b7907b7e3ac326cb4d34b5fa8225edd00b5fe053b')
- version('1.09', sha256='cf6abf623061bcf3d17e5e49bf3f3f0ae400ee89ae2e97c8cb8dcb918b1ebabe')
+ version('1.14.0', sha256='effafd8656b18030b2a5b995cd3650c51a7c45052e6e1c21e48b9fa7a59d926e')
+ version('1.13.2', sha256='74a1c6bb66b2684f03d3b1008642a2e9141909103cd09f428d2c60bcaa51cb40',
+ url='https://www.deshawresearch.com/downloads/download_random123.cgi/Random123-1.13.2.tar.gz')
+ version('1.10', sha256='4afdfba4b941e33e23b5de9b7907b7e3ac326cb4d34b5fa8225edd00b5fe053b',
+ url='https://www.deshawresearch.com/downloads/download_random123.cgi/Random123-1.10.tar.gz')
+ version('1.09', sha256='cf6abf623061bcf3d17e5e49bf3f3f0ae400ee89ae2e97c8cb8dcb918b1ebabe',
+ url='https://www.deshawresearch.com/downloads/download_random123.cgi/Random123-1.09.tar.gz')
patch('ibmxl.patch', when='@1.09')
patch('arm-gcc.patch', when='@1.09')
diff --git a/var/spack/repos/builtin/packages/rapidjson/arm.patch b/var/spack/repos/builtin/packages/rapidjson/arm.patch
index db14814048..db14814048 100755..100644
--- a/var/spack/repos/builtin/packages/rapidjson/arm.patch
+++ b/var/spack/repos/builtin/packages/rapidjson/arm.patch
diff --git a/var/spack/repos/builtin/packages/raxml/nox86.patch b/var/spack/repos/builtin/packages/raxml/nox86.patch
index 16ad6fa67a..16ad6fa67a 100755..100644
--- a/var/spack/repos/builtin/packages/raxml/nox86.patch
+++ b/var/spack/repos/builtin/packages/raxml/nox86.patch
diff --git a/var/spack/repos/builtin/packages/rclone/package.py b/var/spack/repos/builtin/packages/rclone/package.py
index acef5bd475..7d131e4631 100644
--- a/var/spack/repos/builtin/packages/rclone/package.py
+++ b/var/spack/repos/builtin/packages/rclone/package.py
@@ -11,10 +11,11 @@ class Rclone(Package):
to and from various cloud storage providers"""
homepage = "https://rclone.org"
- url = "https://github.com/ncw/rclone/releases/download/v1.56.2/rclone-v1.56.2.tar.gz"
+ url = "https://github.com/ncw/rclone/releases/download/v1.57.0/rclone-v1.57.0.tar.gz"
maintainers = ['alecbcs']
+ version('1.57.0', sha256='3a762c02c202a9142c2d5c1a3927563a556d1683abadd25d2f695e237e4ea693')
version('1.56.2', sha256='a8813d25c4640e52495fee83e525e76283c63f01d1cce8fbb58d8486b0c20c8a')
version('1.56.1', sha256='090b4b082caa554812f341ae26ea6758b40338836122595d6283c60c39eb5a97')
version('1.56.0', sha256='81d2eda23ebaad0a355aab6ff030712470a42505b94c01c9bb5a9ead9168cedb')
diff --git a/var/spack/repos/builtin/packages/reframe/package.py b/var/spack/repos/builtin/packages/reframe/package.py
index 18ed0de0be..150d5d5f13 100644
--- a/var/spack/repos/builtin/packages/reframe/package.py
+++ b/var/spack/repos/builtin/packages/reframe/package.py
@@ -24,6 +24,8 @@ class Reframe(Package):
maintainers = ['victorusu', 'vkarak']
version('master', branch='master')
+ version('3.9.2', sha256='2b60422615d5b52e5dca54ace0f53a712419bcce00a5515775e57e5f5f9d6e92')
+ version('3.9.1', sha256='8f7f4991d1c32cc23f8b10a7509166030548bfe84e4785d017d8d797e31b0498')
version('3.9.0', sha256='ccc36cb1db12148fe7658583e83c2717f5aae0d8c58f6b6ddd398e187c3edc3a')
version('3.8.3', sha256='50b05b0952954215ac00a8b2e8944c946f387043660184f2fbf75995d0579d83')
version('3.8.2', sha256='89116b320021193156f3d7f27057aeb900936502219e2aefa880bc0311052dbf')
diff --git a/var/spack/repos/builtin/packages/rhash/package.py b/var/spack/repos/builtin/packages/rhash/package.py
index eee6a50b03..641b3dc4f4 100644
--- a/var/spack/repos/builtin/packages/rhash/package.py
+++ b/var/spack/repos/builtin/packages/rhash/package.py
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
+
class Rhash(MakefilePackage):
"""RHash is a console utility for computing and verifying hash sums of
@@ -13,11 +15,18 @@ class Rhash(MakefilePackage):
homepage = "https://sourceforge.net/projects/rhash/"
url = "https://github.com/rhash/RHash/archive/v1.3.5.tar.gz"
+ version('1.4.2', sha256='600d00f5f91ef04194d50903d3c79412099328c42f28ff43a0bdb777b00bec62')
version('1.3.5', sha256='98e0688acae29e68c298ffbcdbb0f838864105f9b2bd8857980664435b1f1f2e')
+ # configure: fix clang detection on macOS
+ # Patch accepted and merged upstream, remove on next release
+ patch('https://github.com/rhash/RHash/commit/4dc506066cf1727b021e6352535a8bb315c3f8dc.patch?full_index=1',
+ when='@1.4.2', sha256='3fbfe4603d2ec5228fd198fc87ff3ee281e1f68d252c1afceaa15cba76e9b6b4')
+
# For macOS build instructions, see:
# https://github.com/Homebrew/homebrew-core/blob/master/Formula/rhash.rb
+ @when('@:1.3.5')
def build(self, spec, prefix):
# Doesn't build shared libraries by default
make('PREFIX={0}'.format(prefix))
@@ -27,6 +36,11 @@ class Rhash(MakefilePackage):
else:
make('PREFIX={0}'.format(prefix), 'lib-shared')
+ @when('@1.3.6:')
+ def build(self, spec, prefix):
+ configure('--prefix=')
+ make()
+
def check(self):
# Makefile has both `test` and `check` targets:
#
@@ -36,12 +50,19 @@ class Rhash(MakefilePackage):
     # Default implementation is to run both `make test` and `make check`.
# `test` passes, but `check` fails, so only run `test`.
make('test')
- make('test-static-lib')
+ if self.spec.satisfies('@:1.3.5'):
+ make('test-static-lib')
+ else:
+ make('test-lib-static')
- if not self.spec.satisfies('platform=darwin'):
+ if not self.spec.satisfies('@:1.3.5 platform=darwin'):
make('test-shared')
- make('test-shared-lib')
+ if self.spec.satisfies('@:1.3.5'):
+ make('test-shared-lib')
+ else:
+ make('test-lib-shared')
+ @when('@:1.3.5')
def install(self, spec, prefix):
# Some things are installed to $(DESTDIR)$(PREFIX) while other things
# are installed to $DESTDIR/etc.
@@ -52,3 +73,25 @@ class Rhash(MakefilePackage):
install('librhash/*.dylib', prefix.lib)
else:
make('install-lib-shared', 'DESTDIR={0}'.format(prefix), 'PREFIX=')
+ os.symlink(join_path(prefix.lib, 'librhash.so.0'),
+ join_path(prefix.lib, 'librhash.so'))
+
+ @when('@1.3.6:')
+ def install(self, spec, prefix):
+ # Intermittent issues during installation, prefix.bin directory already exists
+ make('install', 'DESTDIR={0}'.format(prefix), parallel=False)
+ make('install-pkg-config', 'DESTDIR={0}'.format(prefix))
+ make('install-lib-so-link', 'DESTDIR={0}'.format(prefix))
+ make('install-lib-headers', 'DESTDIR={0}'.format(prefix))
+
+ @run_after('install')
+ def darwin_fix(self):
+ # The shared library is not installed correctly on Darwin; fix this
+ if self.spec.satisfies('@1.3.6: platform=darwin'):
+ # Fix RPATH for <prefix>/bin/rhash
+ old = '/lib/librhash.0.dylib'
+ new = self.prefix.lib.join('librhash.dylib')
+ install_name_tool = Executable('install_name_tool')
+ install_name_tool('-change', old, new, self.prefix.bin.rhash)
+ # Fix RPATH for <prefix>/lib/librhash.dylib
+ fix_darwin_install_name(self.prefix.lib)
diff --git a/var/spack/repos/builtin/packages/rivet/package.py b/var/spack/repos/builtin/packages/rivet/package.py
index 2ca4014449..6a1df5cf3a 100644
--- a/var/spack/repos/builtin/packages/rivet/package.py
+++ b/var/spack/repos/builtin/packages/rivet/package.py
@@ -168,7 +168,7 @@ class Rivet(AutotoolsPackage):
if self.spec.variants['hepmc'].value == '2':
args += ['--with-hepmc=' + self.spec['hepmc'].prefix]
else:
- args += ['--with-hepmc3=' + self.spec['hepmc'].prefix]
+ args += ['--with-hepmc3=' + self.spec['hepmc3'].prefix]
if self.spec.satisfies('@:1'):
args += ['--with-boost-incpath=' + self.spec['boost'].includes]
diff --git a/var/spack/repos/builtin/packages/rocm-openmp-extras/package.py b/var/spack/repos/builtin/packages/rocm-openmp-extras/package.py
index 32af84d329..baf58a7a3c 100644
--- a/var/spack/repos/builtin/packages/rocm-openmp-extras/package.py
+++ b/var/spack/repos/builtin/packages/rocm-openmp-extras/package.py
@@ -19,7 +19,8 @@ aomp = [
"808fca9bdefb109d5bcbbc9f5b59c564a6d422488869e986516f2a7233eda235",
"aa75455cf1d333419e5310117678e5789c5222f7cb05b05e3dfacef855c55d84",
"9e6ed2c7bdc3b4af069751b5d3e92913fd5ac318ae844f68bd78c5def990a8f7",
- "c368d39ba9c1bc8b0edbe66edaa3f2a4ff5649c2bd16f499ac19dfd1591dec5a"
+ "c368d39ba9c1bc8b0edbe66edaa3f2a4ff5649c2bd16f499ac19dfd1591dec5a",
+ "c2b1a61a15fdf8d50c7c7a1ad75512f059c53a7bd5afe85f69e984f1174aa74a"
]
devlib = [
@@ -27,7 +28,8 @@ devlib = [
"bca9291385d6bdc91a8b39a46f0fd816157d38abb1725ff5222e6a0daa0834cc",
"d0aa495f9b63f6d8cf8ac668f4dc61831d996e9ae3f15280052a37b9d7670d2a",
"f5f5aa6bfbd83ff80a968fa332f80220256447c4ccb71c36f1fbd2b4a8e9fc1b",
- "34a2ac39b9bb7cfa8175cbab05d30e7f3c06aaffce99eed5f79c616d0f910f5f"
+ "34a2ac39b9bb7cfa8175cbab05d30e7f3c06aaffce99eed5f79c616d0f910f5f",
+ "055a67e63da6491c84cd45865500043553fb33c44d538313dd87040a6f3826f2"
]
llvm = [
@@ -35,7 +37,8 @@ llvm = [
"8262aff88c1ff6c4deb4da5a4f8cda1bf90668950e2b911f93f73edaee53b370",
"aa1f80f429fded465e86bcfaef72255da1af1c5c52d58a4c979bc2f6c2da5a69",
"244e38d824fa7dfa8d0edf3c036b3c84e9c17a16791828e4b745a8d31eb374ae",
- "751eca1d18595b565cfafa01c3cb43efb9107874865a60c80d6760ba83edb661"
+ "751eca1d18595b565cfafa01c3cb43efb9107874865a60c80d6760ba83edb661",
+ "1567d349cd3bcd2c217b3ecec2f70abccd5e9248bd2c3c9f21d4cdb44897fc87"
]
flang = [
@@ -43,7 +46,8 @@ flang = [
"3990d39ff1c908b150f464f0653a123d94be30802f9cad6af18fbb560c4b412e",
"f3e19699ce4ac404f41ffe08ef4546e31e2e741d8deb403b5477659e054275d5",
"f41f661425534b5cfb20e2c0efd9d0800609dc3876ee9c3f76f026d36abbfa35",
- "d6c3f3aaa289251a433d99d1cffe432812093089ae876a6863295a15066c1eaf"
+ "d6c3f3aaa289251a433d99d1cffe432812093089ae876a6863295a15066c1eaf",
+ "13d3525078fd1c569f7c8ea7fce439b04f6b03814bbe88600c08f95c788e7802"
]
extras = [
@@ -51,10 +55,11 @@ extras = [
"5d98d34aff97416d8b5b9e16e7cf474580f8de8a73bd0e549c4440a3c5df4ef5",
"51cc8a7c5943e1d9bc657fc9b9797f45e3ce6a4e544d3d3a967c7cd0185a0510",
"91fdfadb94aa6afc1942124d0953ddc80c297fa75de1897fb42ac8e7dea51ab9",
- "31bbe70b51c259a54370d021ae63528a1740b5477a22412685afd14150fff6f4"
+ "31bbe70b51c259a54370d021ae63528a1740b5477a22412685afd14150fff6f4",
+ "ec6cc4a9c24f098496de3206714dafe9a714f06afacfe21d53a4e6344f9cb4c9"
]
-versions = ['3.9.0', '3.10.0', '4.0.0', '4.1.0', '4.2.0']
+versions = ['3.9.0', '3.10.0', '4.0.0', '4.1.0', '4.2.0', '4.3.0']
versions_dict = dict()
components = ['aomp', 'devlib', 'llvm', 'flang', 'extras']
component_hashes = [aomp, devlib, llvm, flang, extras]
@@ -70,9 +75,10 @@ class RocmOpenmpExtras(Package):
"""OpenMP support for ROCm LLVM."""
homepage = tools_url + "/aomp"
- url = tools_url + "/aomp/archive/rocm-4.2.0.tar.gz"
+ url = tools_url + "/aomp/archive/rocm-4.3.0.tar.gz"
maintainers = ['srekolam', 'arjun-raj-kuppala', 'estewart08']
+ version('4.3.0', sha256=versions_dict['4.3.0']['aomp'])
version('4.2.0', sha256=versions_dict['4.2.0']['aomp'])
version('4.1.0', sha256=versions_dict['4.1.0']['aomp'])
version('4.0.0', sha256=versions_dict['4.0.0']['aomp'])
@@ -88,13 +94,11 @@ class RocmOpenmpExtras(Package):
depends_on('elfutils', type=('build', 'link'))
depends_on('libffi', type=('build', 'link'))
- for ver in ['3.9.0', '3.10.0', '4.0.0', '4.1.0', '4.2.0']:
+ for ver in ['3.9.0', '3.10.0', '4.0.0', '4.1.0', '4.2.0', '4.3.0']:
depends_on('hsakmt-roct@' + ver, when='@' + ver)
depends_on('comgr@' + ver, when='@' + ver)
depends_on('hsa-rocr-dev@' + ver, when='@' + ver)
- # standalone rocm-device-libs
- depends_on('rocm-device-libs@' + ver, when='@' + ver)
- depends_on('llvm-amdgpu@{0} ~rocm-device-libs ~openmp'.format(ver),
+ depends_on('llvm-amdgpu@{0} ~openmp'.format(ver),
when='@' + ver)
# tag changed to 'rocm-' in 4.0.0
@@ -141,7 +145,7 @@ class RocmOpenmpExtras(Package):
when='@' + ver)
def setup_run_environment(self, env):
- devlibs_prefix = self.spec['rocm-device-libs'].prefix
+ devlibs_prefix = self.spec['llvm-amdgpu'].prefix
openmp_extras_prefix = self.spec['rocm-openmp-extras'].prefix
llvm_prefix = self.spec['llvm-amdgpu'].prefix
env.set('AOMP', '{0}'.format(llvm_prefix))
@@ -169,22 +173,22 @@ class RocmOpenmpExtras(Package):
def patch(self):
src = self.stage.source_path
- flang_warning = '-Wno-incompatible-pointer-types-discards-qualifiers)'
aomp_extras = '{0}/rocm-openmp-extras/aomp-extras/aomp-device-libs'
libomptarget = \
'{0}/rocm-openmp-extras/llvm-project/openmp/libomptarget'
flang = '{0}/rocm-openmp-extras/flang/'
+ # If not in a git repo the STRIP command will have an empty
+ # argument. This is fixed in later versions.
+ if self.spec.version == Version('4.3.0'):
+ filter_file('STRIP ${FLANG_SHA}', 'STRIP 0',
+ flang.format(src) + 'CMakeLists.txt', string=True)
+
if self.spec.version < Version('4.1.0'):
plugin = '/plugins/hsa/CMakeLists.txt'
else:
- # Spack thinks some warnings from the flang build are errors.
- # Disable those warnings.
- filter_file('PRIVATE -fPIC)',
- 'PRIVATE -fPIC PRIVATE ' + flang_warning,
- flang.format(src) + 'runtime/flang/CMakeLists.txt',
- string=True)
plugin = '/plugins/amdgpu/CMakeLists.txt'
+
filter_file(
'{ROCM_DIR}/amdgcn/bitcode', '{DEVICE_LIBS_DIR}',
aomp_extras.format(src) + '/aompextras/CMakeLists.txt',
@@ -257,7 +261,7 @@ class RocmOpenmpExtras(Package):
src = self.stage.source_path
gfx_list = "gfx700;gfx701;gfx801;gfx803;gfx900;gfx902;gfx906;gfx908"
openmp_extras_prefix = self.spec['rocm-openmp-extras'].prefix
- devlibs_prefix = self.spec['rocm-device-libs'].prefix
+ devlibs_prefix = self.spec['llvm-amdgpu'].prefix
devlibs_src = '{0}/rocm-openmp-extras/rocm-device-libs'.format(src)
hsa_prefix = self.spec['hsa-rocr-dev'].prefix
hsakmt_prefix = self.spec['hsakmt-roct'].prefix
@@ -268,22 +272,30 @@ class RocmOpenmpExtras(Package):
omp_lib_dir = '{0}/lib'.format(openmp_extras_prefix)
bin_dir = '{0}/bin'.format(llvm_prefix)
lib_dir = '{0}/lib'.format(llvm_prefix)
+ flang_warning = '-Wno-incompatible-pointer-types-discards-qualifiers'
+ libpgmath = '/rocm-openmp-extras/flang/runtime/libpgmath/lib/common'
+ elfutils_inc = spec['elfutils'].prefix.include
# flang1 and flang2 symlink needed for build of flang-runtime
# libdevice symlink to rocm-openmp-extras for runtime
# libdebug symlink to rocm-openmp-extras for runtime
- if not (os.path.islink((os.path.join(bin_dir, 'flang1')))):
- os.symlink(os.path.join(omp_bin_dir, 'flang1'),
- os.path.join(bin_dir, 'flang1'))
- if not (os.path.islink((os.path.join(bin_dir, 'flang2')))):
- os.symlink(os.path.join(omp_bin_dir, 'flang2'),
- os.path.join(bin_dir, 'flang2'))
- if not (os.path.islink((os.path.join(lib_dir, 'libdevice')))):
- os.symlink(os.path.join(omp_lib_dir, 'libdevice'),
- os.path.join(lib_dir, 'libdevice'))
- if not (os.path.islink((os.path.join(llvm_prefix, 'lib-debug')))):
- os.symlink(os.path.join(openmp_extras_prefix, 'lib-debug'),
- os.path.join(llvm_prefix, 'lib-debug'))
+ if (os.path.islink((os.path.join(bin_dir, 'flang1')))):
+ os.unlink(os.path.join(bin_dir, 'flang1'))
+ if (os.path.islink((os.path.join(bin_dir, 'flang2')))):
+ os.unlink(os.path.join(bin_dir, 'flang2'))
+ if (os.path.islink((os.path.join(lib_dir, 'libdevice')))):
+ os.unlink(os.path.join(lib_dir, 'libdevice'))
+ if (os.path.islink((os.path.join(llvm_prefix, 'lib-debug')))):
+ os.unlink(os.path.join(llvm_prefix, 'lib-debug'))
+
+ os.symlink(os.path.join(omp_bin_dir, 'flang1'),
+ os.path.join(bin_dir, 'flang1'))
+ os.symlink(os.path.join(omp_bin_dir, 'flang2'),
+ os.path.join(bin_dir, 'flang2'))
+ os.symlink(os.path.join(omp_lib_dir, 'libdevice'),
+ os.path.join(lib_dir, 'libdevice'))
+ os.symlink(os.path.join(openmp_extras_prefix, 'lib-debug'),
+ os.path.join(llvm_prefix, 'lib-debug'))
# Set cmake args
components = dict()
@@ -299,6 +311,11 @@ class RocmOpenmpExtras(Package):
]
# Shared cmake configuration for openmp, openmp-debug
+ # Due to hsa-rocr-dev using libelf instead of elfutils
+ # the build of openmp fails because the include path
+ # for libelf is placed before elfutils in SPACK_INCLUDE_DIRS.
+ # Passing the elfutils include path via cmake options is a
+ # workaround until hsa-rocr-dev switches to elfutils.
openmp_common_args = [
'-DROCM_DIR={0}'.format(hsa_prefix),
'-DDEVICE_LIBS_DIR={0}/amdgcn/bitcode'.format(devlibs_prefix),
@@ -316,7 +333,9 @@ class RocmOpenmpExtras(Package):
'-DOPENMP_ENABLE_LIBOMPTARGET=1',
'-DOPENMP_ENABLE_LIBOMPTARGET_HSA=1',
'-DLLVM_MAIN_INCLUDE_DIR={0}{1}'.format(src, llvm_inc),
- '-DLLVM_INSTALL_PREFIX={0}'.format(llvm_prefix)
+ '-DLLVM_INSTALL_PREFIX={0}'.format(llvm_prefix),
+ '-DCMAKE_C_FLAGS=-isystem{0}'.format(elfutils_inc),
+ '-DCMAKE_CXX_FLAGS=-isystem{0}'.format(elfutils_inc)
]
if self.spec.version < Version('4.1.0'):
@@ -349,6 +368,15 @@ class RocmOpenmpExtras(Package):
'-DCMAKE_Fortran_COMPILER={0}/flang'.format(bin_dir),
'-DLLVM_TARGETS_TO_BUILD=AMDGPU;x86'
]
+ if self.spec.version >= Version('4.2.0'):
+ # Spack thinks some warnings from the flang build are errors.
+ # Disable those warnings in C and CXX flags.
+ flang_common_args += [
+ '-DCMAKE_CXX_FLAGS={0}'.format(flang_warning) +
+ ' -I{0}{1}'.format(src, libpgmath),
+ '-DCMAKE_C_FLAGS={0}'.format(flang_warning) +
+ ' -I{0}{1}'.format(src, libpgmath)
+ ]
components['pgmath'] = [
'../rocm-openmp-extras/flang/runtime/libpgmath'
diff --git a/var/spack/repos/builtin/packages/rocm-tensile/package.py b/var/spack/repos/builtin/packages/rocm-tensile/package.py
index 07ff61f007..ca4ce6ade6 100644
--- a/var/spack/repos/builtin/packages/rocm-tensile/package.py
+++ b/var/spack/repos/builtin/packages/rocm-tensile/package.py
@@ -27,12 +27,15 @@ class RocmTensile(CMakePackage):
version('3.7.0', sha256='488a7f76ea42a7601d0557f53068ec4832a2c7c06bb1b511470a4e35599a5a4d')
version('3.5.0', sha256='71eb3eed6625b08a4cedb539dd9b596e3d4cc82a1a8063d37d94c0765b6f8257')
- tensile_architecture = ('all', 'gfx803', 'gfx900', 'gfx906', 'gfx908')
+ tensile_architecture = ('all', 'gfx906', 'gfx908', 'gfx000', 'gfx900',
+ 'gfx906:xnack-', 'gfx908:xnack-', 'gfx90a:xnack-',
+ 'gfx1010', 'gfx1011', 'gfx1012', 'gfx1030')
variant('build_type', default='Release', values=("Release", "Debug", "RelWithDebInfo"), description='CMake build type')
- variant('tensile_architecture', default='all', values=tensile_architecture, multi=False)
+ variant('tensile_architecture', default='all', values=tensile_architecture, multi=True)
variant('openmp', default=True, description='Enable OpenMP')
-
+ conflicts('tensile_architecture=gfx906', when='@4.0.1:')
+ conflicts('tensile_architecture=gfx908', when='@4.0.1:')
depends_on('cmake@3:', type='build')
# This is the default library format since 3.7.0
depends_on('msgpack-c@3:', when='@3.7:')
@@ -62,8 +65,20 @@ class RocmTensile(CMakePackage):
def setup_build_environment(self, env):
env.set('CXX', self.spec['hip'].hipcc)
- def cmake_args(self):
+ def get_gpulist_for_tensile_support(self):
arch = self.spec.variants['tensile_architecture'].value
+ if arch[0] == 'all':
+ if self.spec.satisfies('@:4.0.0'):
+ arch_value = self.tensile_architecture[1:4]
+ elif self.spec.satisfies('@4.1.0:4.2.0'):
+ arch_value = self.tensile_architecture[3:6]
+ elif self.spec.satisfies('@4.3.0:'):
+ arch_value = self.tensile_architecture[3:]
+ return arch_value
+ else:
+ return arch
+
+ def cmake_args(self):
args = [
self.define('amd_comgr_DIR', self.spec['comgr'].prefix),
self.define('Tensile_COMPILER', 'hipcc'),
@@ -80,10 +95,8 @@ class RocmTensile(CMakePackage):
if '@3.7.0:' in self.spec:
args.append(self.define('Tensile_LIBRARY_FORMAT', 'msgpack'))
- if self.spec.satisfies('@4.1.0:'):
- if arch == 'gfx906' or arch == 'gfx908':
- arch = arch + ':xnack-'
- args.append(self.define('Tensile_ARCHITECTURE', arch))
+ args.append(self.define('Tensile_ARCHITECTURE',
+ self.get_gpulist_for_tensile_support()))
if self.spec.satisfies('^cmake@3.21.0:3.21.2'):
args.append(self.define('__skip_rocmclang', 'ON'))
diff --git a/var/spack/repos/builtin/packages/rocm-validation-suite/004-remove-git-download-yaml-cpp-use-yaml-cpp-recipe.patch b/var/spack/repos/builtin/packages/rocm-validation-suite/004-remove-git-download-yaml-cpp-use-yaml-cpp-recipe.patch
new file mode 100644
index 0000000000..6224329bab
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rocm-validation-suite/004-remove-git-download-yaml-cpp-use-yaml-cpp-recipe.patch
@@ -0,0 +1,50 @@
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index ff77558..278a732 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -207,36 +207,7 @@ if ( NOT DEFINED CMAKE_PACKAGING_INSTALL_PREFIX )
+ endif ()
+
+ ################################################################################
+-# Download and unpack yaml-cpp at configure time
+-configure_file(CMakeYamlDownload.cmake yaml-download/CMakeLists.txt)
+-execute_process(COMMAND ${CMAKE_COMMAND} -G "${CMAKE_GENERATOR}" .
+- RESULT_VARIABLE result
+- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/yaml-download )
+-if(result)
+- message(FATAL_ERROR "CMake step for yaml-download failed: ${result}")
+-endif()
+-execute_process(COMMAND ${CMAKE_COMMAND} --build .
+- RESULT_VARIABLE result
+- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/yaml-download )
+-if(result)
+- message(FATAL_ERROR "Build step for yaml-download failed: ${result}")
+-endif()
+-execute_process(COMMAND ${CMAKE_COMMAND} ${CMAKE_BINARY_DIR}/yaml-src -B${CMAKE_BINARY_DIR}/yaml-build
+- RESULT_VARIABLE result
+- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/yaml-src )
+-if(result)
+- message(FATAL_ERROR "Config step for yaml-src failed: ${result}")
+-endif()
+
+-add_custom_target(rvs_yaml_target
+- DEPENDS ${CMAKE_BINARY_DIR}/yaml-build/libyaml-cpp.a
+-)
+-
+-add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/yaml-build/libyaml-cpp.a
+- COMMAND make -C ${CMAKE_BINARY_DIR}/yaml-build
+- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/yaml-src
+- COMMENT "Generating yaml-cpp targets"
+- VERBATIM)
+
+ ################################################################################
+ ## GOOGLE TEST
+@@ -446,7 +417,7 @@ if (RVS_BUILD_TESTS)
+ add_subdirectory(testif.so)
+ endif()
+
+-add_dependencies(rvshelper rvs_bin_folder rvs_doc rvs_yaml_target)
++add_dependencies(rvshelper rvs_bin_folder rvs_doc)
+
+
+ add_dependencies(pesm rvslib rvslibrt)
diff --git a/var/spack/repos/builtin/packages/rocm-validation-suite/package.py b/var/spack/repos/builtin/packages/rocm-validation-suite/package.py
index 137b9aa1a5..833cce2fe5 100644
--- a/var/spack/repos/builtin/packages/rocm-validation-suite/package.py
+++ b/var/spack/repos/builtin/packages/rocm-validation-suite/package.py
@@ -35,9 +35,11 @@ class RocmValidationSuite(CMakePackage):
patch('001-fixes-for-rocblas-rocm-smi-install-prefix-path.patch')
patch('002-remove-force-setting-hip-inc-path.patch', when='@4.1.0:')
patch('003-cmake-change-to-remove-installs-and-sudo.patch', when='@4.1.0:')
+ patch('004-remove-git-download-yaml-cpp-use-yaml-cpp-recipe.patch', when='@4.3.0:')
depends_on('cmake@3.5:', type='build')
depends_on('zlib', type='link')
+ depends_on('yaml-cpp~shared')
def setup_build_environment(self, build_env):
spec = self.spec
@@ -55,5 +57,7 @@ class RocmValidationSuite(CMakePackage):
return [
self.define('HIP_INC_DIR', self.spec['hip'].prefix),
self.define('ROCM_SMI_DIR', self.spec['rocm-smi-lib'].prefix),
- self.define('ROCBLAS_DIR', self.spec['rocblas'].prefix)
+ self.define('ROCBLAS_DIR', self.spec['rocblas'].prefix),
+ self.define('YAML_INC_DIR', self.spec['yaml-cpp'].prefix.include),
+ self.define('YAML_LIB_DIR', self.spec['yaml-cpp'].libs.directories[0])
]
diff --git a/var/spack/repos/builtin/packages/rocprofiler-dev/package.py b/var/spack/repos/builtin/packages/rocprofiler-dev/package.py
index 53070ae524..558a4b63ea 100644
--- a/var/spack/repos/builtin/packages/rocprofiler-dev/package.py
+++ b/var/spack/repos/builtin/packages/rocprofiler-dev/package.py
@@ -37,6 +37,8 @@ class RocprofilerDev(CMakePackage):
depends_on('rocminfo@' + ver, when='@' + ver)
depends_on('roctracer-dev-api@' + ver, when='@' + ver)
+ depends_on('numactl', type='link', when='@4.3.1')
+
# See https://github.com/ROCm-Developer-Tools/rocprofiler/pull/50
patch('fix-includes.patch')
diff --git a/var/spack/repos/builtin/packages/rocsolver/link-clients-blas.patch b/var/spack/repos/builtin/packages/rocsolver/link-clients-blas.patch
new file mode 100644
index 0000000000..13dfd31976
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rocsolver/link-clients-blas.patch
@@ -0,0 +1,22 @@
+diff -r -u a/clients/benchmarks/CMakeLists.txt b/clients/benchmarks/CMakeLists.txt
+--- a/clients/benchmarks/CMakeLists.txt 2021-08-14 18:54:53.356456513 -0600
++++ b/clients/benchmarks/CMakeLists.txt 2021-08-14 18:55:25.125354419 -0600
+@@ -19,6 +19,7 @@
+ target_link_libraries( rocsolver-bench PRIVATE
+ cblas
+ lapack
++ blas
+ Threads::Threads
+ hip::device
+ rocsolver-common
+diff -r -u a/clients/gtest/CMakeLists.txt b/clients/gtest/CMakeLists.txt
+--- a/clients/gtest/CMakeLists.txt 2021-08-14 18:54:53.356456513 -0600
++++ b/clients/gtest/CMakeLists.txt 2021-08-14 18:55:16.581112850 -0600
+@@ -89,6 +89,7 @@
+ target_link_libraries( rocsolver-test PRIVATE
+ cblas
+ lapack
++ blas
+ GTest::GTest
+ hip::device
+ rocsolver-common
diff --git a/var/spack/repos/builtin/packages/rocsolver/package.py b/var/spack/repos/builtin/packages/rocsolver/package.py
index 26dbff2759..f5273f545a 100644
--- a/var/spack/repos/builtin/packages/rocsolver/package.py
+++ b/var/spack/repos/builtin/packages/rocsolver/package.py
@@ -42,6 +42,15 @@ class Rocsolver(CMakePackage):
depends_on('cmake@3.8:', type='build', when='@4.1.0:')
depends_on('cmake@3.5:', type='build')
+ depends_on('googletest@1.10.0:', type='test')
+ depends_on('netlib-lapack@3.7.1:', type='test')
+
+ patch('link-clients-blas.patch', when='@4.3.0:')
+
+ def check(self):
+ exe = join_path(self.build_directory, 'clients', 'staging', 'rocsolver-test')
+ self.run_test(exe, options=['--gtest_filter=checkin*'])
+
for ver in ['3.5.0', '3.7.0', '3.8.0', '3.9.0', '3.10.0', '4.0.0', '4.1.0',
'4.2.0', '4.3.0', '4.3.1']:
depends_on('hip@' + ver, when='@' + ver)
@@ -52,7 +61,7 @@ class Rocsolver(CMakePackage):
tgt = self.spec.variants['amdgpu_target'].value
args = [
self.define('BUILD_CLIENTS_SAMPLES', 'OFF'),
- self.define('BUILD_CLIENTS_TESTS', 'OFF'),
+ self.define('BUILD_CLIENTS_TESTS', self.run_tests),
self.define('BUILD_CLIENTS_BENCHMARKS', 'OFF')
]
if self.spec.satisfies('@4.1.0'):
diff --git a/var/spack/repos/builtin/packages/roms/package.py b/var/spack/repos/builtin/packages/roms/package.py
index 0492e9a492..9ef249a900 100644
--- a/var/spack/repos/builtin/packages/roms/package.py
+++ b/var/spack/repos/builtin/packages/roms/package.py
@@ -71,7 +71,8 @@ class Roms(MakefilePackage):
"""
Edit Linux-flang.mk makefile to support AOCC compiler
"""
- fflags = ['-fveclib=AMDLIBM', '-O3', '-ffast-math']
+ fflags = ['-fveclib=AMDLIBM', '-O3', '-ffast-math',
+ '-funroll-loops', '-Mstack_arrays', '-std=f2008']
make_aocc = join_path('Compilers',
'{0}-{1}.mk'.format(self.arch, lib))
diff --git a/var/spack/repos/builtin/packages/runc/package.py b/var/spack/repos/builtin/packages/runc/package.py
new file mode 100644
index 0000000000..8afe0eef9d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/runc/package.py
@@ -0,0 +1,24 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Runc(MakefilePackage):
+ """CLI tool for spawning containers on Linux according to the OCI specification"""
+
+ homepage = 'https://github.com/opencontainers/runc'
+ url = 'https://github.com/opencontainers/runc/releases/download/v1.0.2/runc.tar.xz'
+ maintainers = ['bernhardkaindl']
+
+ version('1.0.2', sha256='740acb49e33eaf4958b5109c85363c1d3900f242d4cab47fbdbefa6f8f3c6909')
+
+ depends_on('go', type='build')
+ depends_on('go-md2man', type='build')
+ depends_on('pkgconfig', type='build')
+ depends_on('libseccomp')
+
+ def install(self, spec, prefix):
+ make('install', 'PREFIX=' + prefix)
diff --git a/var/spack/repos/builtin/packages/salmon-tddft/cmakefix.patch b/var/spack/repos/builtin/packages/salmon-tddft/cmakefix.patch
index 65ff24bdb4..65ff24bdb4 100755..100644
--- a/var/spack/repos/builtin/packages/salmon-tddft/cmakefix.patch
+++ b/var/spack/repos/builtin/packages/salmon-tddft/cmakefix.patch
diff --git a/var/spack/repos/builtin/packages/salmon-tddft/fjmpi.patch b/var/spack/repos/builtin/packages/salmon-tddft/fjmpi.patch
index 3208d150e8..3208d150e8 100755..100644
--- a/var/spack/repos/builtin/packages/salmon-tddft/fjmpi.patch
+++ b/var/spack/repos/builtin/packages/salmon-tddft/fjmpi.patch
diff --git a/var/spack/repos/builtin/packages/salmon-tddft/package.py b/var/spack/repos/builtin/packages/salmon-tddft/package.py
index ea5e887bb2..ea5e887bb2 100755..100644
--- a/var/spack/repos/builtin/packages/salmon-tddft/package.py
+++ b/var/spack/repos/builtin/packages/salmon-tddft/package.py
diff --git a/var/spack/repos/builtin/packages/salmon-tddft/v2.0.libxc-5.0.patch b/var/spack/repos/builtin/packages/salmon-tddft/v2.0.libxc-5.0.patch
index 956eb93da4..956eb93da4 100755..100644
--- a/var/spack/repos/builtin/packages/salmon-tddft/v2.0.libxc-5.0.patch
+++ b/var/spack/repos/builtin/packages/salmon-tddft/v2.0.libxc-5.0.patch
diff --git a/var/spack/repos/builtin/packages/samtools/package.py b/var/spack/repos/builtin/packages/samtools/package.py
index 58bf1823f8..439475b393 100644
--- a/var/spack/repos/builtin/packages/samtools/package.py
+++ b/var/spack/repos/builtin/packages/samtools/package.py
@@ -14,6 +14,7 @@ class Samtools(Package):
homepage = "https://www.htslib.org"
url = "https://github.com/samtools/samtools/releases/download/1.13/samtools-1.13.tar.bz2"
+ version('1.14', sha256='9341dabaa98b0ea7d60fd47e42af25df43a7d3d64d8e654cdf852974546b7d74')
version('1.13', sha256='616ca2e051cc8009a1e9c01cfd8c7caf8b70916ddff66f3b76914079465f8c60')
version('1.12', sha256='6da3770563b1c545ca8bdf78cf535e6d1753d6383983c7929245d5dba2902dcb')
version('1.10', sha256='7b9ec5f05d61ec17bd9a82927e45d8ef37f813f79eb03fe06c88377f1bd03585')
@@ -34,6 +35,7 @@ class Samtools(Package):
depends_on('python', type='run')
# htslib became standalone @1.3.1, must use corresponding version
+ depends_on('htslib@1.14', when='@1.14')
depends_on('htslib@1.13', when='@1.13')
depends_on('htslib@1.12', when='@1.12')
depends_on('htslib@1.11', when='@1.11')
@@ -68,8 +70,11 @@ class Samtools(Package):
make('prefix={0}'.format(prefix), 'install')
# Install dev headers and libs for legacy apps depending on them
- mkdir(prefix.include)
- mkdir(prefix.lib)
- install('sam.h', prefix.include)
- install('bam.h', prefix.include)
- install('libbam.a', prefix.lib)
+ # per https://github.com/samtools/samtools/releases/tag/1.14
+ # these have been removed (bam.h still exists but pared down)
+ if spec.satisfies('@:1.13'):
+ mkdir(prefix.include)
+ mkdir(prefix.lib)
+ install('sam.h', prefix.include)
+ install('bam.h', prefix.include)
+ install('libbam.a', prefix.lib)
diff --git a/var/spack/repos/builtin/packages/scale/fj-own_compiler.patch b/var/spack/repos/builtin/packages/scale/fj-own_compiler.patch
new file mode 100644
index 0000000000..fb4aa01e85
--- /dev/null
+++ b/var/spack/repos/builtin/packages/scale/fj-own_compiler.patch
@@ -0,0 +1,51 @@
+diff --git a/sysdep/Makedef.FUGAKU b/sysdep/Makedef.FUGAKU
+index 37d41388a..c08707165 100644
+--- a/sysdep/Makedef.FUGAKU
++++ b/sysdep/Makedef.FUGAKU
+@@ -7,8 +7,8 @@
+ ##### Fortran setting
+ #FFLAGS is set in Mkinclude. FFLAGS_DEBUG is used if SCALE_DEBUG=T
+
+-FC = mpifrtpx
+-SFC = frtpx
++FC = mpifrt
++SFC = frt
+ MODDIROPT = -M
+
+ FFLAGS_FAST = -Kfast,parallel,ocl,preex,array_private,noalias=s,mfunc=2 \
+@@ -34,9 +34,9 @@ FFLAGS_DEBUG = -O0 \
+ ##### C setting
+ #CFLAGS is set in Mkinclude. CFLAGS_DEBUG is used if SCALE_DEBUG=T
+
+-CC = mpifccpx
++CC = mpifcc
+
+-CFLAGS_FAST = -Kfast,parallel,ocl,preex,array_private,region_extension,restp=all -Ksimd
++CFLAGS_FAST = -Kfast,ocl,preex -Ksimd
+ CFLAGS_DEBUG = -O0
+
+
+diff --git a/sysdep/Makedef.FX100 b/sysdep/Makedef.FX100
+index 30077bd21..a26d29928 100644
+--- a/sysdep/Makedef.FX100
++++ b/sysdep/Makedef.FX100
+@@ -7,8 +7,8 @@
+ ##### Fortran setting
+ #FFLAGS is set in Mkinclude. FFLAGS_DEBUG is used if SCALE_DEBUG=T
+
+-FC = mpifrtpx
+-SFC = frtpx
++FC = mpifrt
++SFC = frt
+ MODDIROPT = -M
+
+ FFLAGS_FAST = -Kfast,parallel,ocl,preex,array_private,noalias=s,mfunc=2 \
+@@ -35,7 +35,7 @@ FFLAGS_DEBUG = -O0 \
+ ##### C setting
+ #CFLAGS is set in Mkinclude. CFLAGS_DEBUG is used if SCALE_DEBUG=T
+
+-CC = mpifccpx
++CC = mpifcc
+
+ CFLAGS_FAST = -Kfast,parallel,ocl,preex,array_private,region_extension,restp=all -Ksimd
+ CFLAGS_DEBUG = -O0
diff --git a/var/spack/repos/builtin/packages/scale/package.py b/var/spack/repos/builtin/packages/scale/package.py
new file mode 100644
index 0000000000..460089e123
--- /dev/null
+++ b/var/spack/repos/builtin/packages/scale/package.py
@@ -0,0 +1,84 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import os
+
+from spack import *
+
+
+class Scale(MakefilePackage):
+ """SCALE (Scalable Computing for Advanced Library and Environment) is
+ a basic library for weather and climate model of the earth and planets
+ aimed to be widely used in various models.
+ The SCALE library is developed with co-design by researchers of
+ computational science and computer science."""
+ homepage = "https://scale.riken.jp/"
+ url = "https://scale.riken.jp/archives/scale-5.4.4.tar.gz"
+
+ maintainers = ['t-yamaura']
+
+ version('5.4.4', sha256='7d0ec4069c15d8b9ec7166f32c9a2eda772d975a8e08e420e6b16891ceebb316', preferred=True)
+ version('5.3.6', sha256='3ab0d42cdb16eee568c65b880899e861e464e92088ceb525066c726f31d04848')
+ version('5.2.6', sha256='e63141d05810e3f41fc89c9eb15e2319d753832adabdac8f7c8dd7acc0f5f8ed')
+
+ depends_on('openmpi', type=('build', 'link', 'run'))
+ depends_on('netcdf-c')
+ depends_on('netcdf-fortran')
+ depends_on('parallel-netcdf')
+
+ patch('fj-own_compiler.patch', when='%fj')
+
+ parallel = False
+
+ def setup_build_environment(self, build_env):
+ build_env.set('PREFIX', self.prefix)
+
+ def build(self, spec, prefix):
+ scale_sys_str = ''
+ if self.spec.satisfies('platform=linux %gcc'):
+ scale_sys_str = 'Linux64-gnu-ompi'
+ elif self.spec.satisfies('platform=linux %intel'):
+ scale_sys_str = 'Linux64-intel-impi'
+ elif self.spec.satisfies('platform=linux %pgi'):
+ scale_sys_str = 'Linux64-pgi-ompi'
+ elif self.spec.satisfies('platform=linux target=arm %gcc'):
+ scale_sys_str = 'LinuxARM-gnu-ompi'
+ elif self.spec.satisfies('platform=linux target=a64fx %fj'):
+ scale_sys_str = 'FUGAKU'
+ elif self.spec.satisfies('platform=linux target=s64fx %fj'):
+ scale_sys_str = 'FX100'
+ elif self.spec.satisfies('platform=darwin %gcc'):
+ scale_sys_str = 'MacOSX-gnu-ompi'
+
+ if scale_sys_str == '':
+ raise InstallError('unsupported arch and compiler combination.')
+ env['SCALE_SYS'] = scale_sys_str
+
+ # set SCALE_NETCDF_INCLUDE
+ nc_config = which('nc-config')
+ nc_str = nc_config('--cflags', '--fflags', output=str)
+ try:
+ env['SCALE_NETCDF_INCLUDE'] = nc_str.replace('\n', ' ')
+ except TypeError: # for python3
+ env['SCALE_NETCDF_INCLUDE'] = nc_str.decode().replace('\n', ' ')
+
+ # set SCALE_NETCDF_LIBS
+ nc_config = which('nc-config')
+ nc_str = nc_config('--libs', '--flibs', output=str)
+ try:
+ env['SCALE_NETCDF_LIBS'] = nc_str.replace('\n', ' ')
+ except TypeError: # for python3
+ env['SCALE_NETCDF_LIBS'] = nc_str.decode().replace('\n', ' ')
+
+ make()
+
+ def install(self, spec, prefix):
+ make('install')
+
+ install_tree('bin', prefix.bin)
+ install_tree('lib', prefix.lib)
+ install_tree('doc', prefix.share.docs)
+ install_tree(os.path.join('scale-rm', 'test'),
+ os.path.join(prefix.share, 'test'))
diff --git a/var/spack/repos/builtin/packages/seacas/package.py b/var/spack/repos/builtin/packages/seacas/package.py
index bc2c4d1592..cd389ce56c 100644
--- a/var/spack/repos/builtin/packages/seacas/package.py
+++ b/var/spack/repos/builtin/packages/seacas/package.py
@@ -96,6 +96,7 @@ class Seacas(CMakePackage):
with when('+metis'):
depends_on('metis+int64+real64')
depends_on('parmetis+int64', when='+mpi')
+ depends_on('libx11', when='+x11')
# The Faodel TPL is only supported in seacas@2021-04-05:
depends_on('faodel@1.2108.1:+mpi', when='+faodel +mpi')
@@ -126,6 +127,8 @@ class Seacas(CMakePackage):
'-DTPL_ENABLE_MPI:BOOL=ON',
'-DMPI_BASE_DIR:PATH=%s' % spec['mpi'].prefix,
])
+        else:
+            options.append('-DTPL_ENABLE_MPI:BOOL=OFF')
options.extend([
'-DSEACASProj_ENABLE_TESTS:BOOL=ON',
diff --git a/var/spack/repos/builtin/packages/seal/package.py b/var/spack/repos/builtin/packages/seal/package.py
new file mode 100644
index 0000000000..3fa81bba0b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/seal/package.py
@@ -0,0 +1,30 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Seal(CMakePackage):
+ """Microsoft SEAL is an easy-to-use open-source (MIT licensed)
+ homomorphic encryption library developed by the Cryptography and Privacy
+ Research Group at Microsoft. Microsoft SEAL is written in modern standard
+ C++ and is easy to compile and run in many different environments. For
+ more information about the Microsoft SEAL project, see sealcrypto.org."""
+
+ homepage = "https://github.com/microsoft/SEAL"
+ url = "https://github.com/microsoft/SEAL/archive/refs/tags/v3.7.1.tar.gz"
+
+ maintainers = ['wohlbier']
+
+ version('3.7.1', sha256='6737177bfb582cc1a2863ef1e96cc6c39b119257e7192981a3190eb79e0fcfd3')
+ version('3.7.0', sha256='06ea835d6c9cdbbc4edb72a8db4bd4b1115995f075774043b9f31938d0624543')
+ version('3.6.6', sha256='85a63188a5ccc8d61b0adbb92e84af9b7223fc494d33260fa17a121433790a0e')
+ version('3.6.5', sha256='77bfcb4a8b785206c419cdf7aff8c200250691518eeddc958f874d1f567b2872')
+ version('3.6.4', sha256='7392574fe3b757d5ced8cc973b23a7b69be0cd35b6e778b3c2447598e9ece5b3')
+ version('3.6.3', sha256='aeecdf79afba5f83d1828b3525760c04e52928614038e9a860773943d5d14558')
+ version('3.6.2', sha256='1e2a97deb1f5b543640fc37d7b4737cab2a9849f616c13ff40ad3be4cf29fb9c')
+ version('3.6.1', sha256='e399c0df7fb60ad450a0ccfdc81b99d19308d0fc1f730d4cad4748dfb2fdb516')
+ version('3.6.0', sha256='79c0e45bf301f4577a7633b14e8b26e37eefc89fd4f6a29d13f87e5f22a372ad')
+ version('3.5.9', sha256='23bf3bf7ae1dae5dae271244a5baa66fa01856c52e263fe8368c3a40f2399fc7')
diff --git a/var/spack/repos/builtin/packages/sensei/package.py b/var/spack/repos/builtin/packages/sensei/package.py
index d4ab68964a..1e47210cf7 100644
--- a/var/spack/repos/builtin/packages/sensei/package.py
+++ b/var/spack/repos/builtin/packages/sensei/package.py
@@ -30,7 +30,6 @@ class Sensei(CMakePackage):
version('1.0.0', sha256='5b8609352048e048e065a7b99f615a602f84b3329085e40274341488ef1b9522')
variant('shared', default=True, description='Enables shared libraries')
- variant('sencore', default=True, description='Enables the SENSEI core library')
variant('ascent', default=False, description='Build with ParaView-Catalyst support')
variant('catalyst', default=False, description='Build with ParaView-Catalyst support')
variant('libsim', default=False, description='Build with VisIt-Libsim support')
@@ -39,8 +38,7 @@ class Sensei(CMakePackage):
variant('hdf5', default=False, description='Enables HDF5 adaptors and endpoints')
variant('vtkm', default=False, description='Enable VTKm adaptors and endpoints')
variant('python', default=False, description='Enable Python bindings')
- variant('miniapps', default=True, description='Enable the parallel 3D and oscillators miniapps')
- variant('cxxstd', default='11', values=('11', '14', '17'), multi=False, description='Use the specified C++ standard when building.')
+ variant('miniapps', default=False, description='Enable the parallel 3D and oscillators miniapps')
# All SENSEI versions up to 2.1.1 support only Python 2, so in this case
# Paraview 6 cannot be used since it requires Python 3. Starting from
@@ -85,11 +83,8 @@ class Sensei(CMakePackage):
# -Ox flags are set by default in CMake based on the build type
args = [
self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
- self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
- self.define('CMAKE_C_STANDARD', 11),
self.define('SENSEI_USE_EXTERNAL_pugixml', True),
- self.define('CMAKE_POSITION_INDEPENDENT_CODE', True),
- self.define_from_variant('ENABLE_SENSEI', 'sencore'),
+ self.define('ENABLE_SENSEI', True),
self.define_from_variant('ENABLE_ASCENT', 'ascent'),
self.define_from_variant('ENABLE_VTKM', 'vtkm'),
self.define_from_variant('ENABLE_CATALYST', 'catalyst'),
diff --git a/var/spack/repos/builtin/packages/sfcio/package.py b/var/spack/repos/builtin/packages/sfcio/package.py
index ed9d65ebbe..78d9c612c0 100644
--- a/var/spack/repos/builtin/packages/sfcio/package.py
+++ b/var/spack/repos/builtin/packages/sfcio/package.py
@@ -18,3 +18,10 @@ class Sfcio(CMakePackage):
maintainers = ['t-brown', 'kgerheiser', 'Hang-Lei-NOAA', 'edwardhartnett']
version('1.4.1', sha256='d9f900cf18ec1a839b4128c069b1336317ffc682086283443354896746b89c59')
+
+ def setup_run_environment(self, env):
+ lib = find_libraries('libsfcio', root=self.prefix, shared=False, recursive=True)
+ # Only one library version, but still need to set _4 to make NCO happy
+ for suffix in ('4', ''):
+ env.set('SFCIO_LIB' + suffix, lib[0])
+ env.set('SFCIO_INC' + suffix, join_path(self.prefix, 'include'))
diff --git a/var/spack/repos/builtin/packages/sherpa/package.py b/var/spack/repos/builtin/packages/sherpa/package.py
new file mode 100644
index 0000000000..e3d5922739
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sherpa/package.py
@@ -0,0 +1,129 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Sherpa(AutotoolsPackage):
+ """Sherpa is a Monte Carlo event generator for the Simulation of
+ High-Energy Reactions of PArticles in lepton-lepton, lepton-photon,
+ photon-photon, lepton-hadron and hadron-hadron collisions."""
+
+ homepage = "https://sherpa-team.gitlab.io"
+ url = "https://gitlab.com/sherpa-team/sherpa/-/archive/v2.2.11/sherpa-v2.2.11.tar.gz"
+ list_url = "https://gitlab.com/sherpa-team/sherpa/-/tags"
+ git = "https://gitlab.com/sherpa-team/sherpa.git"
+
+ tags = ['hep', 'eic']
+
+ maintainers = ['wdconinc', 'vvolkl']
+ version('2.2.12', sha256='4ba78098e45aaac0bc303d1b5abdc15809f30b407abf9457d99b55e63384c83d')
+ version('2.2.11', sha256='5e12761988b41429f1d104f84fdf352775d233cde7a165eb64e14dcc20c3e1bd')
+ version('2.2.10', sha256='ae23bc8fdcc9f8c26becc41692822233b62203cd72a7e0dab2ca19316aa0aad7')
+ version('2.2.9', sha256='ebc836d42269a0c4049d3fc439a983d19d12595d9a06db2d18765bd1e301923e')
+ version('2.2.8', sha256='ff198cbae5de445e6fe383151021ef24b1628dffc0da6bf3737753f6672a0091')
+ version('2.0.0', sha256='0e873b27bb1be46ca5ed451d1b8514ca84c10221057b11be5952180076e6f848')
+ version('1.3.1', sha256='31881207838d341358db64e3fdadfeee1ea2f6d1cb42f370014f622f579159ae')
+ version('1.3.0', sha256='08b13c65b66f2edde6996d2a06762a12a0682ffb64bca43654df47321e5039a0')
+ version('1.2.3', sha256='029727337a430d6675a1a12dce3ced0411041e79ddaf4ce3b9466035cf6c8804')
+ version('1.2.2', sha256='6e7b5ea80b99f1378519009e494030d6cf4c4491f91218d749eabb8ffaad9ac1')
+ version('1.2.1', sha256='838462f4a1e8768135363aa6b8532fd8f5e5789a269b858f8e3728ab37f6a1d1')
+ version('1.2.0', sha256='509508fd0ad72aaf55ab484da8b6bc0b31688c955adcda62a3e8f94689cebf99')
+ version('1.1.3', sha256='6335e5eb1fc304e9618496d3ddb198b3591e57b27db6e876af8fd649a8b98c93')
+ version('1.1.2', sha256='e1689cad6700dc013af0afb0d33729ac2b5e9841d2f325c85b10d773e7f8a80e')
+ version('1.1.1', sha256='b80e1d75934be79b73400d2c95d96e88651626ea29ddcb9d8fde9c1812039e29')
+ version('1.1.0', sha256='8052d137d668353dc710f8691b921e772820d39e20361f0d616ee2da1ac798f2')
+ version('1.0.9', sha256='fe28db91ea8264364395c7e5efeeae3e5c01ea1343e0db7fe13924c6f17fb963')
+ version('1.0.8', sha256='6e346bafd13b5b05ad566a73759da6d5e64d65c5036780cc4911d93277e891fa')
+ version('1.0.7', sha256='d1eeefd96c6822ea8eb926447ca91ec4a1c714e4746323e92b1e17764e51ff0b')
+ version('1.0.6', sha256='358d417ec3afde24618c222bc9b742bc5102d435622b3cd6f2e3f72d03656255')
+
+ _cxxstd_values = ('11', '14', '17')
+ variant('cxxstd', default='11', values=_cxxstd_values, multi=False,
+ description='Use the specified C++ standard when building')
+
+ variant('analysis', default=True, description='Enable analysis components')
+ variant('mpi', default=False, description='Enable MPI')
+ variant('python', default=False, description='Enable Python API')
+ variant('hepmc2', default=True, description='Enable HepMC (version 2.x) support')
+ variant('hepmc3', default=True, description='Enable HepMC (version 3.x) support')
+ variant('hepmc3root', default=False, description='Enable HepMC (version 3.1+) ROOT support')
+ variant('rivet', default=False, description='Enable Rivet support')
+ variant('fastjet', default=True, description='Enable FASTJET')
+ variant('openloops', default=False, description='Enable OpenLoops')
+ variant('recola', default=False, description='Enable Recola')
+ variant('lhole', default=False, description='Enable Les Houches One-Loop Generator interface')
+ variant('root', default=False, description='Enable ROOT support')
+ variant('lhapdf', default=True, description='Enable LHAPDF support')
+ variant('gzip', default=False, description='Enable gzip support')
+ variant('pythia', default=True, description='Enable fragmentation/decay interface to Pythia')
+
+ # Note that the delphes integration seems utterly broken: https://sherpa.hepforge.org/trac/ticket/305
+
+ depends_on('autoconf', type='build')
+ depends_on('automake', type='build')
+ depends_on('libtool', type='build')
+ depends_on('m4', type='build')
+ depends_on('texinfo', type='build')
+ depends_on('sqlite')
+
+ depends_on('mpi', when='+mpi')
+ depends_on('python', when='+python')
+ depends_on('swig', when='+python', type='build')
+ depends_on('hepmc', when='+hepmc2')
+ depends_on('hepmc3', when='+hepmc3')
+ depends_on('hepmc3 +rootio', when='+hepmc3root')
+ depends_on('rivet', when='+rivet')
+ depends_on('fastjet', when='+fastjet')
+ depends_on('openloops', when='+openloops')
+ depends_on('recola', when='+recola')
+ depends_on('root', when='+root')
+ depends_on('lhapdf', when='+lhapdf')
+ depends_on('gzip', when='+gzip')
+ depends_on('pythia6', when='+pythia')
+
+ for std in _cxxstd_values:
+ depends_on('root cxxstd=' + std, when='+root cxxstd=' + std)
+
+ def patch(self):
+ filter_file(r'#include <sys/sysctl.h>',
+ '#ifdef ARCH_DARWIN\n#include <sys/sysctl.h>\n#endif',
+ 'ATOOLS/Org/Run_Parameter.C')
+
+ def configure_args(self):
+ args = []
+ args.append('--enable-shared')
+ args.append('--enable-binreloc')
+ args.append('--enable-static')
+ args.append('--enable-hepevtsize=200000')
+ args.append('--with-sqlite3=' + self.spec['sqlite'].prefix)
+ args.extend(self.enable_or_disable('mpi'))
+ if self.spec.satisfies('+python'):
+ args.append('--enable-pyext')
+ args.extend(self.enable_or_disable('analysis'))
+ args.extend(self.enable_or_disable('lhole'))
+ args.extend(self.enable_or_disable('gzip'))
+ args.extend(self.enable_or_disable('pythia'))
+ if self.spec.satisfies('+hepmc2'):
+ args.append('--enable-hepmc2=' + self.spec['hepmc'].prefix)
+ if self.spec.satisfies('+hepmc3'):
+ args.append('--enable-hepmc3=' + self.spec['hepmc3'].prefix)
+ if self.spec.satisfies('+rivet'):
+ args.append('--enable-rivet=' + self.spec['rivet'].prefix)
+ if self.spec.satisfies('+fastjet'):
+ args.append('--enable-fastjet=' + self.spec['fastjet'].prefix)
+ if self.spec.satisfies('+openloops'):
+ args.append('--enable-openloops=' + self.spec['openloops'].prefix)
+ if self.spec.satisfies('+recola'):
+ args.append('--enable-recola=' + self.spec['recola'].prefix)
+ if self.spec.satisfies('+root'):
+ args.append('--enable-root=' + self.spec['root'].prefix)
+ if self.spec.satisfies('+lhapdf'):
+ args.append('--enable-lhapdf=' + self.spec['lhapdf'].prefix)
+ if self.spec.satisfies('+hztool'):
+ args.append('--enable-hztool=' + self.spec['hztool'].prefix)
+ if self.spec.satisfies('+cernlib'):
+ args.append('--enable-cernlib=' + self.spec['cernlib'].prefix)
+ return args
diff --git a/var/spack/repos/builtin/packages/sigio/package.py b/var/spack/repos/builtin/packages/sigio/package.py
index ba63a5a013..97fb108848 100644
--- a/var/spack/repos/builtin/packages/sigio/package.py
+++ b/var/spack/repos/builtin/packages/sigio/package.py
@@ -18,3 +18,10 @@ class Sigio(CMakePackage):
maintainers = ['t-brown', 'kgerheiser', 'Hang-Lei-NOAA', 'edwardhartnett']
version('2.3.2', sha256='333f3cf3a97f97103cbafcafc2ad89b24faa55b1332a98adc1637855e8a5b613')
+
+ def setup_run_environment(self, env):
+ lib = find_libraries('libsigio', root=self.prefix, shared=False, recursive=True)
+ # Only one library version, but still need to set _4 to make NCO happy
+ for suffix in ('4', ''):
+ env.set('SIGIO_LIB' + suffix, lib[0])
+ env.set('SIGIO_INC' + suffix, join_path(self.prefix, 'include'))
diff --git a/var/spack/repos/builtin/packages/silo/H5EPR_SEMI_COLON.patch b/var/spack/repos/builtin/packages/silo/H5EPR_SEMI_COLON.patch
new file mode 100644
index 0000000000..ae74145081
--- /dev/null
+++ b/var/spack/repos/builtin/packages/silo/H5EPR_SEMI_COLON.patch
@@ -0,0 +1,58 @@
+diff --git a/src/hdf5_drv/H5FDsilo.c b/src/hdf5_drv/H5FDsilo.c
+--- a/src/hdf5_drv/H5FDsilo.c
++++ b/src/hdf5_drv/H5FDsilo.c
+@@ -243,6 +243,12 @@
+ return tmp;
+ }
+
++#if HDF5_VERSION_GE(1,10,8)
++#define H5EPR_SEMI_COLON ;
++#else
++#define H5EPR_SEMI_COLON
++#endif
++
+
+ #ifdef H5_HAVE_SNPRINTF
+ #define H5E_PUSH_HELPER(Func,Cls,Maj,Min,Msg,Ret,Errno) \
+@@ -252,13 +258,13 @@
+ snprintf(msg, sizeof(msg), Msg "(errno=%d, \"%s\")", \
+ Errno, strerror(Errno)); \
+ ret_value = Ret; \
+- H5Epush_ret(Func, Cls, Maj, Min, msg, Ret) \
++ H5Epush_ret(Func, Cls, Maj, Min, msg, Ret) H5EPR_SEMI_COLON \
+ }
+ #else
+ #define H5E_PUSH_HELPER(Func,Cls,Maj,Min,Msg,Ret,Errno) \
+ { \
+ ret_value = Ret; \
+- H5Epush_ret(Func, Cls, Maj, Min, Msg, Ret) \
++ H5Epush_ret(Func, Cls, Maj, Min, Msg, Ret) H5EPR_SEMI_COLON \
+ }
+ #endif
+
+@@ -1355,7 +1368,7 @@
+ assert(sizeof(hsize_t)<=8);
+ memcpy(p, &file->block_size, sizeof(hsize_t));
+ if (H5Tconvert(H5T_NATIVE_HSIZE, H5T_STD_U64LE, 1, buf+8, NULL, H5P_DEFAULT)<0)
+- H5Epush_ret(func, H5E_ERR_CLS, H5E_DATATYPE, H5E_CANTCONVERT, "can't convert superblock info", -1)
++ H5Epush_ret(func, H5E_ERR_CLS, H5E_DATATYPE, H5E_CANTCONVERT, "can't convert superblock info", -1) H5EPR_SEMI_COLON
+
+ return 0;
+ }
+@@ -1383,14 +1396,14 @@
+
+ /* Make sure the name/version number is correct */
+ if (strcmp(name, "LLNLsilo"))
+- H5Epush_ret(func, H5E_ERR_CLS, H5E_FILE, H5E_BADVALUE, "invalid silo superblock", -1)
++ H5Epush_ret(func, H5E_ERR_CLS, H5E_FILE, H5E_BADVALUE, "invalid silo superblock", -1) H5EPR_SEMI_COLON
+
+ buf += 8;
+ /* Decode block size */
+ assert(sizeof(hsize_t)<=8);
+ memcpy(x, buf, 8);
+ if (H5Tconvert(H5T_STD_U64LE, H5T_NATIVE_HSIZE, 1, x, NULL, H5P_DEFAULT)<0)
+- H5Epush_ret(func, H5E_ERR_CLS, H5E_DATATYPE, H5E_CANTCONVERT, "can't convert superblock info", -1)
++ H5Epush_ret(func, H5E_ERR_CLS, H5E_DATATYPE, H5E_CANTCONVERT, "can't convert superblock info", -1) H5EPR_SEMI_COLON
+ ap = (hsize_t*)x;
+ /*file->block_size = *ap; ignore stored value for now */
+
diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py
index 3f082a6753..0828c1ed7e 100644
--- a/var/spack/repos/builtin/packages/silo/package.py
+++ b/var/spack/repos/builtin/packages/silo/package.py
@@ -44,7 +44,6 @@ class Silo(AutotoolsPackage):
depends_on('automake', type='build', when='+shared')
depends_on('libtool', type='build', when='+shared')
depends_on('mpi', when='+mpi')
- depends_on('hdf5@:1.10', when='@:4.10.2+hdf5')
depends_on('hdf5', when='+hdf5')
depends_on('qt+gui~framework@4.8:4.9', when='+silex')
depends_on('libx11', when='+silex')
@@ -55,7 +54,11 @@ class Silo(AutotoolsPackage):
patch('remove-mpiposix.patch', when='@4.8:4.10.2')
patch('H5FD_class_t-terminate.patch', when='@:4.10.2 ^hdf5@1.10.0:')
+ # H5EPR_SEMI_COLON.patch should be applied only to silo@4.11 when building
+ # with hdf5@1.10.8 or later 1.10 or with hdf5@1.12.1 or later 1.12
+ patch('H5EPR_SEMI_COLON.patch', when='@:4.11 ^hdf5@1.10.8:1.10,1.12.1:1.12')
+ conflicts('hdf5@1.10.8:', when="@:4.10.2")
conflicts('+hzip', when="@4.11-bsd")
conflicts('+fpzip', when="@4.11-bsd")
conflicts('+hzip', when="@4.10.2-bsd")
diff --git a/var/spack/repos/builtin/packages/simgrid/package.py b/var/spack/repos/builtin/packages/simgrid/package.py
index b440eaa5fa..66db5a99f6 100644
--- a/var/spack/repos/builtin/packages/simgrid/package.py
+++ b/var/spack/repos/builtin/packages/simgrid/package.py
@@ -14,6 +14,10 @@ class Simgrid(CMakePackage):
url = "https://github.com/simgrid/simgrid/releases/download/v3.27/simgrid-3.27.tar.gz"
git = 'https://framagit.org/simgrid/simgrid.git'
+ maintainers = ['viniciusvgp']
+
+ version('3.29', sha256='83e8afd653555eeb70dc5c0737b88036c7906778ecd3c95806c6bf5535da2ccf')
+ version('3.28', sha256='558276e7f8135ce520d98e1bafa029c6c0f5c2d0e221a3a5e42c378fe0c5ef2c')
version('3.27', sha256='51aeb9de0434066e5fec40e785f5ea9fa934afe7f6bfb4aa627246e765f1d6d7')
version('3.26', sha256='ac50da1eacc5a53b094a988a8ecde09962c29320f346b45e74dd32ab9d9f3e96')
version('3.25', sha256='0b5dcdde64f1246f3daa7673eb1b5bd87663c0a37a2c5dcd43f976885c6d0b46',
diff --git a/var/spack/repos/builtin/packages/singularity/package.py b/var/spack/repos/builtin/packages/singularity/package.py
index a53605b968..15ca4ec3f7 100644
--- a/var/spack/repos/builtin/packages/singularity/package.py
+++ b/var/spack/repos/builtin/packages/singularity/package.py
@@ -21,11 +21,12 @@ class Singularity(SingularityBase):
'''
homepage = "https://singularity.hpcng.org/"
git = "https://github.com/hpcng/singularity.git"
- url = "https://github.com/hpcng/singularity/releases/download/v3.8.3/singularity-3.8.3.tar.gz"
+ url = "https://github.com/hpcng/singularity/releases/download/v3.8.5/singularity-3.8.5.tar.gz"
maintainers = ['alalazo']
version('master', branch='master')
+ version('3.8.5', sha256='7fff78b5c07b5d4d08269bd267ac5e994390f933321e54efd6b7c86683153ce4')
version('3.8.3', sha256='2e22eb9ee1b73fdd51b8783149f0e4d83c0d2d8a0c1edf6034157d50eeefb835')
version('3.8.0', sha256='e9608b0e0a8c805218bbe795e9176484837b2f7fcb95e5469b853b3809a2412e')
version('3.7.4', sha256='c266369a8bf2747f44e0759858c3fc3b2325b975a8818b2668f0b97b124d0164')
diff --git a/var/spack/repos/builtin/packages/singularityce/package.py b/var/spack/repos/builtin/packages/singularityce/package.py
index 8b4fea32ff..750e15dbd5 100644
--- a/var/spack/repos/builtin/packages/singularityce/package.py
+++ b/var/spack/repos/builtin/packages/singularityce/package.py
@@ -16,7 +16,7 @@ class SingularityBase(MakefilePackage):
variant('network', default=True, description='install network plugins')
depends_on('pkgconfig', type='build')
- depends_on('go')
+ depends_on('go@1.16:')
depends_on('uuid')
depends_on('libgpg-error')
depends_on('libseccomp')
@@ -25,6 +25,8 @@ class SingularityBase(MakefilePackage):
depends_on('shadow', type='run', when='@3.3:')
depends_on('cryptsetup', type=('build', 'run'), when='@3.4:')
+ conflicts('platform=darwin', msg='singularity requires a Linux VM on Windows & Mac')
+
# Go has novel ideas about how projects should be organized.
# We'll point GOPATH at the stage dir, and move the unpacked src
# tree into the proper subdir in our overridden do_stage below.
@@ -160,10 +162,11 @@ class Singularityce(SingularityBase):
'''
homepage = "https://sylabs.io/singularity/"
- url = "https://github.com/sylabs/singularity/releases/download/v3.8.0/singularity-ce-3.8.0.tar.gz"
+ url = "https://github.com/sylabs/singularity/releases/download/v3.9.1/singularity-ce-3.9.1.tar.gz"
git = "https://github.com/sylabs/singularity.git"
maintainers = ['alalazo']
version('master', branch='master')
+ version('3.9.1', sha256='1ba3bb1719a420f48e9b0a6afdb5011f6c786d0f107ef272528c632fff9fd153')
version('3.8.0', sha256='5fa2c0e7ef2b814d8aa170826b833f91e5031a85d85cd1292a234e6c55da1be1')
diff --git a/var/spack/repos/builtin/packages/sirius/package.py b/var/spack/repos/builtin/packages/sirius/package.py
index 7574b01048..b5ce00d1f8 100644
--- a/var/spack/repos/builtin/packages/sirius/package.py
+++ b/var/spack/repos/builtin/packages/sirius/package.py
@@ -21,6 +21,7 @@ class Sirius(CMakePackage, CudaPackage):
version('develop', branch='develop')
version('master', branch='master')
+ version('7.3.0', sha256='69b5cf356adbe181be6c919032859c4e0160901ff42a885d7e7ea0f38cc772e2')
version('7.2.7', sha256='929bf7f131a4847624858b9c4295532c24b0c06f6dcef5453c0dfc33fb78eb03')
version('7.2.6', sha256='e751fd46cdc7c481ab23b0839d3f27fb00b75dc61dc22a650c92fe8e35336e3a')
version('7.2.5', sha256='794e03d4da91025f77542d3d593d87a8c74e980394f658a0210a4fd91c011f22')
diff --git a/var/spack/repos/builtin/packages/slepc/package.py b/var/spack/repos/builtin/packages/slepc/package.py
index 97360e0b19..df705897d7 100644
--- a/var/spack/repos/builtin/packages/slepc/package.py
+++ b/var/spack/repos/builtin/packages/slepc/package.py
@@ -13,7 +13,7 @@ class Slepc(Package, CudaPackage, ROCmPackage):
"""Scalable Library for Eigenvalue Problem Computations."""
homepage = "https://slepc.upv.es"
- url = "https://slepc.upv.es/download/distrib/slepc-3.16.0.tar.gz"
+ url = "https://slepc.upv.es/download/distrib/slepc-3.16.1.tar.gz"
git = "https://gitlab.com/slepc/slepc.git"
maintainers = ['joseeroman', 'balay']
@@ -22,6 +22,7 @@ class Slepc(Package, CudaPackage, ROCmPackage):
test_requires_compiler = True
version('main', branch='main')
+ version('3.16.1', sha256='b1a8ad8db1ad88c60616e661ab48fc235d5a8b6965023cb6d691b9a2cfa94efb')
version('3.16.0', sha256='be7292b85430e52210eb389c4f434b67164e96d19498585e82d117e850d477f4')
version('3.15.2', sha256='15fd317c4dd07bb41a994ad4c27271a6675af5f2abe40b82a64a27eaae2e632a')
version('3.15.1', sha256='9c7c3a45f0d9df51decf357abe090ef05114c38a69b7836386a19a96fb203aea')
diff --git a/var/spack/repos/builtin/packages/slirp4netns/package.py b/var/spack/repos/builtin/packages/slirp4netns/package.py
new file mode 100644
index 0000000000..cf294f7390
--- /dev/null
+++ b/var/spack/repos/builtin/packages/slirp4netns/package.py
@@ -0,0 +1,25 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Slirp4netns(AutotoolsPackage):
+ """User-mode networking for unprivileged network namespaces"""
+
+ homepage = 'https://github.com/rootless-containers/slirp4netns'
+ url = 'https://github.com/rootless-containers/slirp4netns/archive/v1.1.12.tar.gz'
+ maintainers = ['bernhardkaindl']
+
+ version('1.1.12', sha256='279dfe58a61b9d769f620b6c0552edd93daba75d7761f7c3742ec4d26aaa2962')
+
+ depends_on('autoconf', type='build', when='@1.1.12')
+ depends_on('automake', type='build', when='@1.1.12')
+ depends_on('libtool', type='build', when='@1.1.12')
+ depends_on('pkgconfig', type='build')
+ depends_on('glib')
+ depends_on('libcap')
+ depends_on('libseccomp')
+ depends_on('libslirp')
diff --git a/var/spack/repos/builtin/packages/soapdenovo2/package.py b/var/spack/repos/builtin/packages/soapdenovo2/package.py
index 82e0172ab0..1804f34281 100644
--- a/var/spack/repos/builtin/packages/soapdenovo2/package.py
+++ b/var/spack/repos/builtin/packages/soapdenovo2/package.py
@@ -19,6 +19,14 @@ class Soapdenovo2(MakefilePackage):
version('242', sha256='a0043ceb41bc17a1c3fd2b8abe4f9029a60ad3edceb2b15af3c2cfabd36aa11b')
version('240', sha256='cc9e9f216072c0bbcace5efdead947e1c3f41f09baec5508c7b90f933a090909')
+ def flag_handler(self, name, flags):
+ if self.spec.satisfies('%gcc@10:'):
+ if name == 'cflags' or name == 'CFLAGS':
+ flags.append('-fcommon')
+ if name == 'cxxflags' or name == 'CXXFLAGS':
+ flags.append('-fcommon')
+ return (flags, None, None)
+
def install(self, spec, prefix):
mkdirp(prefix.bin)
install('SOAPdenovo-63mer', prefix.bin)
diff --git a/var/spack/repos/builtin/packages/sollve/package.py b/var/spack/repos/builtin/packages/sollve/package.py
index f617f636b6..8acfa7fb9a 100644
--- a/var/spack/repos/builtin/packages/sollve/package.py
+++ b/var/spack/repos/builtin/packages/sollve/package.py
@@ -133,7 +133,6 @@ class Sollve(CMakePackage):
revision=372427, destination='projects',
when='@1.0a2+internal_unwind', placement='libunwind')
- conflicts('+clang_extra', when='~clang')
conflicts('+lldb', when='~clang')
conflicts('%gcc@:5.0')
diff --git a/var/spack/repos/builtin/packages/sp/package.py b/var/spack/repos/builtin/packages/sp/package.py
index c374df97c9..fe90d5b01e 100644
--- a/var/spack/repos/builtin/packages/sp/package.py
+++ b/var/spack/repos/builtin/packages/sp/package.py
@@ -17,3 +17,10 @@ class Sp(CMakePackage):
maintainers = ['t-brown', 'kgerheiser', 'edwardhartnett', 'Hang-Lei-NOAA']
version('2.3.3', sha256='c0d465209e599de3c0193e65671e290e9f422f659f1da928505489a3edeab99f')
+
+ def setup_run_environment(self, env):
+ for suffix in ('4', '8', 'd'):
+ lib = find_libraries('libsp_' + suffix, root=self.prefix,
+ shared=False, recursive=True)
+ env.set('SP_LIB' + suffix, lib[0])
+ env.set('SP_INC' + suffix, 'include_' + suffix)
diff --git a/var/spack/repos/builtin/packages/spack/package.py b/var/spack/repos/builtin/packages/spack/package.py
index 9149b7ea65..2abe10af61 100644
--- a/var/spack/repos/builtin/packages/spack/package.py
+++ b/var/spack/repos/builtin/packages/spack/package.py
@@ -21,6 +21,7 @@ class Spack(Package):
maintainers = ['haampie']
version('develop', branch='develop')
+ version('0.17.0', sha256='93df99256a892ceefb153d48e2080c01d18e58e27773da2c2a469063d67cb582')
version('0.16.3', sha256='26636a2e2cc066184f12651ac6949f978fc041990dba73934960a4c9c1ea383d')
version('0.16.2', sha256='ed3e5d479732b0ba82489435b4e0f9088571604e789f7ab9bc5ce89030793350')
version('0.16.1', sha256='8d893036b24d9ee0feee41ac33dd66e4fc68d392918f346f8a7a36a69c567567')
@@ -29,7 +30,8 @@ class Spack(Package):
variant('development_tools', default=True, description='Build development dependencies')
# Python (with spack python -i ipython support)
- depends_on('python', type='run')
+ depends_on('python@2.6.0:2.7,3.5:', type='run')
+ depends_on('python@2.7.0:2.7,3.5:', type='run', when='@0.18.0:')
depends_on('py-ipython', type='run')
# Concretizer
@@ -68,11 +70,15 @@ class Spack(Package):
# See https://github.com/spack/spack/pull/24686
# and #25595, #25726, #25853, #25923, #25924 upstream in python/cpython
with when('@:0.16.2'):
- conflicts('python@3.10:')
- conflicts('python@3.9.6:3.9')
- conflicts('python@3.8.11:3.8')
- conflicts('python@3.7.11:3.7')
- conflicts('python@3.6.14:3.6')
+ conflicts('^python@3.10:')
+ conflicts('^python@3.9.6:3.9')
+ conflicts('^python@3.8.11:3.8')
+ conflicts('^python@3.7.11:3.7')
+ conflicts('^python@3.6.14:3.6')
+
+ # https://bugs.python.org/issue45235#msg406121
+ # To be fixed in 3.9.9, no other releases are affected
+ conflicts('^python@3.9.8', when='@:0.17.0')
# Development tools
with when('+development_tools'):
diff --git a/var/spack/repos/builtin/packages/star-ccm-plus/package.py b/var/spack/repos/builtin/packages/star-ccm-plus/package.py
index 8067199485..45c4e8a256 100644
--- a/var/spack/repos/builtin/packages/star-ccm-plus/package.py
+++ b/var/spack/repos/builtin/packages/star-ccm-plus/package.py
@@ -5,6 +5,7 @@
import glob
import os
+import re
from spack import *
@@ -16,6 +17,7 @@ class StarCcmPlus(Package):
url = "file://{0}/STAR-CCM+11.06.010_02_linux-x86_64.tar.gz".format(os.getcwd())
manual_download = True
+ version('16.06.008_01', sha256='64577ec0e9a98d971114e68c4eec05bb746e061dfbf77b8d8919583c796c9e4b')
version('11.06.010_02', 'd349c6ac8293d8e6e7a53533d695588f')
variant('doc', default=False, description='Install the documentation')
@@ -47,12 +49,29 @@ class StarCcmPlus(Package):
env['PS1'] = '>'
env['PROMPT_COMMAND'] = ''
- installer = Executable(glob.glob('*.bin')[0])
+ if '@:12' in spec:
+ file_pattern = '*.bin'
+ else:
+ file_pattern = '*.sh'
+
+ installer = Executable(join_path(self.stage.source_path,
+ glob.glob(file_pattern)[0]))
installer(
'-i', 'silent',
'-DINSTALLDIR={0}'.format(prefix),
'-DINSTALLFLEX=false',
'-DADDSYSTEMPATH=false',
+ '-DCOMPUTE_NODE=false',
'-DNODOC={0}'.format('false' if '+docs' in spec else 'true')
)
+
+ def setup_run_environment(self, env):
+ # using Version.up_to strips out the 0 padding
+ version = re.sub('_.*$', '', format(self.spec.version))
+ env.prepend_path('PATH', join_path(self.prefix, version,
+ 'STAR-View+{0}'.format(version),
+ 'bin'))
+ env.prepend_path('PATH', join_path(self.prefix, version,
+ 'STAR-CCM+{0}'.format(version),
+ 'star', 'bin'))
diff --git a/var/spack/repos/builtin/packages/structure/package.py b/var/spack/repos/builtin/packages/structure/package.py
index a5880171eb..5c8f1f8a9d 100644
--- a/var/spack/repos/builtin/packages/structure/package.py
+++ b/var/spack/repos/builtin/packages/structure/package.py
@@ -15,12 +15,14 @@ class Structure(MakefilePackage):
version('2.3.4', sha256='f2b72b9189a514f53e921bbdc1aa3dbaca7ac34a8467af1f972c7e4fc9c0bb37')
- depends_on('jdk', type=('build', 'run'))
-
def url_for_version(self, version):
url = "http://web.stanford.edu/group/pritchardlab/structure_software/release_versions/v{0}/structure_kernel_source.tar.gz"
return url.format(version)
+ @when('%gcc@10:')
+ def edit(self, spec, prefix):
+ filter_file(r'(CFLAGS =.*$)', '\\1 -fcommon', 'Makefile')
+
def install(self, spec, prefix):
mkdirp(prefix.bin)
install('structure', prefix.bin)
diff --git a/var/spack/repos/builtin/packages/suite-sparse/package.py b/var/spack/repos/builtin/packages/suite-sparse/package.py
index 748a598078..7dbd8f3d0d 100644
--- a/var/spack/repos/builtin/packages/suite-sparse/package.py
+++ b/var/spack/repos/builtin/packages/suite-sparse/package.py
@@ -37,13 +37,14 @@ class SuiteSparse(Package):
variant('pic', default=True, description='Build position independent code (required to link with shared libraries)')
variant('cuda', default=False, description='Build with CUDA')
variant('openmp', default=False, description='Build with OpenMP')
+ variant('graphblas', default=False, description='Build with GraphBLAS (takes a long time to compile)')
depends_on('mpfr@4.0.0:', type=('build', 'link'), when='@5.8.0:')
depends_on('gmp', type=('build', 'link'), when='@5.8.0:')
depends_on('blas')
depends_on('lapack')
depends_on('m4', type='build', when='@5.0.0:')
- depends_on('cmake', when='@5.2.0:', type='build')
+ depends_on('cmake', when='+graphblas @5.2.0:', type='build')
depends_on('metis@5.1.0', when='@4.5.1:')
# in @4.5.1. TBB support in SPQR seems to be broken as TBB-related linkng
@@ -60,15 +61,51 @@ class SuiteSparse(Package):
# This patch adds '-lm' when linking libgraphblas and when using clang.
# Fixes 'libgraphblas.so.2.0.1: undefined reference to `__fpclassify''
- patch('graphblas_libm_dep.patch', when='@5.2.0:5.2%clang')
+ patch('graphblas_libm_dep.patch', when='+graphblas @5.2.0:5.2%clang')
# CUDA-11 dropped sm_30 code generation, remove hardcoded sm_30 from makefile
# open issue: https://github.com/DrTimothyAldenDavis/SuiteSparse/issues/56
# Tested only with 5.9.0, previous versions probably work too
- patch('fix_cuda11.patch', when='@5.9.0:+cuda ^cuda@11:')
+ patch('fix_cuda11.patch', when='@5.9.0:5.10.0+cuda ^cuda@11:')
conflicts('%gcc@:4.8', when='@5.2.0:', msg='gcc version must be at least 4.9 for suite-sparse@5.2.0:')
+ # The @2021.x versions of tbb dropped the task_scheduler_init.h header and
+ # related stuff (which have long been deprecated). This appears to be
+ # rather problematic for suite-sparse (see e.g.
+ # https://github.com/DrTimothyAldenDavis/SuiteSparse/blob/master/SPQR/Source/spqr_parallel.cpp)
+ # Have Spack complain if +tbb and trying to use a 2021.x version of tbb
+ conflicts('+tbb', when='^intel-oneapi-tbb@2021:',
+ msg='suite-sparse needs task_scheduler_init.h dropped in '
+ 'recent tbb libs')
+ conflicts('+tbb', when='^intel-tbb@2021:',
+ msg='suite-sparse needs task_scheduler_init.h dropped in '
+ 'recent tbb libs')
+
+ def symbol_suffix_blas(self, spec, args):
+ """When using BLAS with a special symbol suffix we use defines to
+ replace blas symbols, e.g. dgemm_ becomes dgemm_64_ when
+ symbol_suffix=64_."""
+
+ # Currently only OpenBLAS does this.
+ if not spec.satisfies('^openblas'):
+ return
+
+ suffix = spec['openblas'].variants['symbol_suffix'].value
+ if suffix == 'none':
+ return
+
+ symbols = (
+ 'dtrsv_', 'dgemv_', 'dtrsm_', 'dgemm_', 'dsyrk_', 'dger_', 'dscal_',
+ 'dpotrf_', 'ztrsv_', 'zgemv_', 'ztrsm_', 'zgemm_', 'zherk_',
+ 'zgeru_', 'zscal_', 'zpotrf_',
+ 'dnrm2_', 'dlarf_', 'dlarfg_', 'dlarft_', 'dlarfb_', 'dznrm2_',
+ 'zlarf_', 'zlarfg_', 'zlarft_', 'zlarfb_'
+ )
+
+ for symbol in symbols:
+ args.append('CFLAGS+=-D{0}={1}{2}'.format(symbol, symbol, suffix))
+
def install(self, spec, prefix):
# The build system of SuiteSparse is quite old-fashioned.
# It's basically a plain Makefile which include an header
@@ -124,6 +161,9 @@ class SuiteSparse(Package):
spec.satisfies('^intel-parallel-studio+mkl+ilp64')):
make_args.append('UMFPACK_CONFIG=-DLONGBLAS="long long"')
+ # Handle symbol suffix of some BLAS'es (e.g. 64_ or _64 for ilp64)
+ self.symbol_suffix_blas(spec, make_args)
+
# SuiteSparse defaults to using '-fno-common -fexceptions' in
# CFLAGS, but not all compilers use the same flags for these
# optimizations
@@ -137,7 +177,7 @@ class SuiteSparse(Package):
make_args += ['CFLAGS+=-DBLAS_NO_UNDERSCORE']
# Intel TBB in SuiteSparseQR
- if 'tbb' in spec:
+ if '+tbb' in spec:
make_args += [
'SPQR_CONFIG=-DHAVE_TBB',
'TBB=%s' % spec['tbb'].libs.ld_flags,
@@ -151,14 +191,42 @@ class SuiteSparse(Package):
'CMAKE_OPTIONS=-DCMAKE_INSTALL_PREFIX=%s' % prefix +
' -DCMAKE_LIBRARY_PATH=%s' % prefix.lib]
- # In those SuiteSparse versions calling "make install" in one go is
- # not possible, mainly because of GraphBLAS. Thus compile first and
- # install in a second run.
- if '@5.4.0:' in self.spec:
- make('library', *make_args)
-
make_args.append('INSTALL=%s' % prefix)
- make('install', *make_args)
+
+ # Filter the targets we're interested in
+ targets = [
+ 'SuiteSparse_config',
+ 'AMD',
+ 'BTF',
+ 'CAMD',
+ 'CCOLAMD',
+ 'COLAMD',
+ 'CHOLMOD',
+ 'CXSparse',
+ 'LDL',
+ 'KLU',
+ 'UMFPACK',
+ 'RBio'
+ # NOTE: 'SPQR' is appended below, after the optional CUDA targets,
+ ]
+ if spec.satisfies('+cuda'):
+ targets.extend([
+ 'SuiteSparse_GPURuntime',
+ 'GPUQREngine'
+ ])
+ targets.extend([
+ 'SPQR'
+ ])
+ if spec.satisfies('+graphblas'):
+ targets.append('GraphBLAS')
+ if spec.satisfies('@5.8.0:'):
+ targets.append('SLIP_LU')
+
+ # Finally make and install
+ make('-C', 'SuiteSparse_config', 'library', 'config')
+ for target in targets:
+ make('-C', target, 'library', *make_args)
+ make('-C', target, 'install', *make_args)
@run_after('install')
def fix_darwin_install(self):
@@ -179,9 +247,5 @@ class SuiteSparse(Package):
'suitesparseconfig']
query_parameters = self.spec.last_query.extra_parameters
comps = all_comps if not query_parameters else query_parameters
- libs = find_libraries(['lib' + c for c in comps], root=self.prefix.lib,
+ return find_libraries(['lib' + c for c in comps], root=self.prefix.lib,
shared=True, recursive=False)
- if not libs:
- return None
- libs += find_system_libraries('librt')
- return libs
diff --git a/var/spack/repos/builtin/packages/sundials/package.py b/var/spack/repos/builtin/packages/sundials/package.py
index acb6f993a6..2529b30621 100644
--- a/var/spack/repos/builtin/packages/sundials/package.py
+++ b/var/spack/repos/builtin/packages/sundials/package.py
@@ -18,7 +18,7 @@ class Sundials(CMakePackage, CudaPackage, ROCmPackage):
git = "https://github.com/llnl/sundials.git"
tags = ['radiuss', 'e4s']
- maintainers = ['cswoodward', 'gardner48', 'balos1']
+ maintainers = ['balos1', 'cswoodward', 'gardner48']
# ==========================================================================
# Versions
@@ -290,6 +290,7 @@ class Sundials(CMakePackage, CudaPackage, ROCmPackage):
if '+rocm' in spec:
args.extend([
+ '-DCMAKE_C_COMPILER=%s' % (spec['llvm-amdgpu'].prefix + '/bin/clang'),
'-DCMAKE_CXX_COMPILER=%s' % spec['hip'].hipcc,
'-DENABLE_HIP=ON',
'-DHIP_PATH=%s' % spec['hip'].prefix,
diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py
index 82aa8f134f..3570243a9a 100644
--- a/var/spack/repos/builtin/packages/superlu-dist/package.py
+++ b/var/spack/repos/builtin/packages/superlu-dist/package.py
@@ -6,7 +6,7 @@
from spack import *
-class SuperluDist(CMakePackage, CudaPackage):
+class SuperluDist(CMakePackage, CudaPackage, ROCmPackage):
"""A general purpose library for the direct solution of large, sparse,
nonsymmetric systems of linear equations on high performance machines."""
@@ -16,9 +16,10 @@ class SuperluDist(CMakePackage, CudaPackage):
tags = ['e4s']
- maintainers = ['xiaoye', 'gchavez2', 'balay', 'pghysels']
+ maintainers = ['xiaoye', 'gchavez2', 'balay', 'pghysels', 'liuyangzhuan']
version('develop', branch='master')
+ version('amd', branch='amd')
version('7.1.1', sha256='558053b3d4a56eb661c4f04d4fcab6604018ce5db97115394c161b56c9c278ff')
version('7.1.0', sha256='edbea877562be95fb22c7de1ff484f18685bec4baa8e4f703c414d3c035d4a66')
version('6.4.0', sha256='cb9c0b2ba4c28e5ed5817718ba19ae1dd63ccd30bc44c8b8252b54f5f04a44cc')
@@ -47,12 +48,16 @@ class SuperluDist(CMakePackage, CudaPackage):
depends_on('parmetis')
depends_on('metis@5:')
depends_on('cmake@3.18.1:', type='build', when='@7.1.0:')
+ depends_on('hipblas', when='+rocm')
+ depends_on('rocsolver', when='+rocm')
+ conflicts('+rocm', when='+cuda')
conflicts('+cuda', when='@:6.3')
conflicts('^cuda@11.5.0:', when='@7.1.0:')
patch('xl-611.patch', when='@:6.1.1 %xl')
patch('xl-611.patch', when='@:6.1.1 %xl_r')
+ patch('superlu-cray-ftn-case.patch', when='@7.1.1 %cce')
def cmake_args(self):
spec = self.spec
@@ -60,6 +65,7 @@ class SuperluDist(CMakePackage, CudaPackage):
'-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc,
'-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx,
'-DCMAKE_INSTALL_LIBDIR:STRING=%s' % self.prefix.lib,
+ '-DCMAKE_INSTALL_BINDIR:STRING=%s' % self.prefix.bin,
'-DTPL_BLAS_LIBRARIES=%s' % spec['blas'].libs.joined(";"),
'-DTPL_LAPACK_LIBRARIES=%s' % spec['lapack'].libs.joined(";"),
'-DUSE_XSDK_DEFAULTS=YES',
@@ -94,6 +100,15 @@ class SuperluDist(CMakePackage, CudaPackage):
args.append(
'-DCMAKE_CUDA_FLAGS=-arch=sm_{0}'.format(cuda_arch[0]))
+ if '+rocm' in spec and spec.satisfies('@amd'):
+ args.append('-DTPL_ENABLE_HIPLIB=TRUE')
+ args.append(
+ '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix))
+ rocm_archs = spec.variants['amdgpu_target'].value
+ if 'none' not in rocm_archs:
+ args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'.
+ format(",".join(rocm_archs)))
+
if '+shared' in spec:
args.append('-DBUILD_SHARED_LIBS:BOOL=ON')
else:
diff --git a/var/spack/repos/builtin/packages/superlu-dist/superlu-cray-ftn-case.patch b/var/spack/repos/builtin/packages/superlu-dist/superlu-cray-ftn-case.patch
new file mode 100644
index 0000000000..8f3963656e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/superlu-dist/superlu-cray-ftn-case.patch
@@ -0,0 +1,15 @@
+diff --git a/FORTRAN/CMakeLists.txt b/FORTRAN/CMakeLists.txt
+index bf402a9..6a039e1 100644
+--- a/FORTRAN/CMakeLists.txt
++++ b/FORTRAN/CMakeLists.txt
+@@ -3,8 +3,8 @@ include_directories(${SuperLU_DIST_SOURCE_DIR}/SRC)
+ include_directories(${SuperLU_DIST_BINARY_DIR}/FORTRAN)
+
+ set(headers
+- ${CMAKE_BINARY_DIR}/FORTRAN/superlu_mod.mod
+- ${CMAKE_BINARY_DIR}/FORTRAN/superlupara_mod.mod
++ ${CMAKE_BINARY_DIR}/FORTRAN/SUPERLU_MOD.mod
++ ${CMAKE_BINARY_DIR}/FORTRAN/SUPERLUPARA_MOD.mod
+ ${CMAKE_BINARY_DIR}/FORTRAN/superlu_dist_config.fh
+ )
+
diff --git a/var/spack/repos/builtin/packages/symengine/package.py b/var/spack/repos/builtin/packages/symengine/package.py
index 6163feee43..e6f11000ea 100644
--- a/var/spack/repos/builtin/packages/symengine/package.py
+++ b/var/spack/repos/builtin/packages/symengine/package.py
@@ -17,6 +17,7 @@ class Symengine(CMakePackage):
maintainers = ['HaoZeke']
version('master', branch='master')
+ version('0.8.1', sha256='41eb6ae6901c09e53d7f61f0758f9201e81fc534bfeecd4b2bd4b4e6f6768693')
version('0.7.0', sha256='8b865bb72b71539d9cd2488a0468c6c3ea90cc606062a7880c3ff5af6abd74b1')
version('0.6.0', sha256='4d2caa86c03eaaa8ed004084d02f87b5c51b6229f8ba70d161227e22d6302f0a')
version('0.5.0', sha256='5d02002f00d16a0928d1056e6ecb8f34fd59f3bfd8ed0009a55700334dbae29b')
diff --git a/var/spack/repos/builtin/packages/tangram/package.py b/var/spack/repos/builtin/packages/tangram/package.py
index 265cc11574..3c54c07328 100644
--- a/var/spack/repos/builtin/packages/tangram/package.py
+++ b/var/spack/repos/builtin/packages/tangram/package.py
@@ -22,7 +22,7 @@ class Tangram(CMakePackage):
version('1.0.1', sha256='8f2f8c01bb2d726b0f64e5a5bc3aa2bd8057ccaee7a29c68f1439d16e39aaa90')
version('master', branch='master', submodules=True)
- variant('mpi', default=True,
+ variant('mpi', default=False,
description='Enable interface reconstruction with MPI')
variant('thrust', default=False,
description='Enable on-node parallelism with NVidia Thrust')
@@ -46,15 +46,12 @@ class Tangram(CMakePackage):
depends_on('cmake@3.13:', type='build')
depends_on('mpi', when='+mpi')
+ # Variants that are forwarded one-to-one to the wonton dependency
+ wonton_depends = ['mpi', 'jali', 'openmp', 'thrust', 'kokkos', 'cuda']
- depends_on('wonton')
- depends_on('wonton+jali', when='+jali')
- depends_on('wonton~mpi', when='~mpi')
- depends_on('wonton+mpi', when='+mpi')
- depends_on('wonton+thrust', when='+thrust')
- depends_on('wonton+kokkos', when='+kokkos')
- depends_on('wonton+cuda', when='+cuda')
- depends_on('wonton+openmp', when='+openmp')
+ for _variant in wonton_depends:
+ depends_on('wonton+' + _variant, when='+' + _variant)
+ depends_on('wonton~' + _variant, when='~' + _variant)
def cmake_args(self):
options = []
@@ -87,3 +84,8 @@ class Tangram(CMakePackage):
options.append('-DENABLE_APP_TESTS=OFF')
return options
+
+ def check(self):
+ if self.run_tests:
+ with working_dir(self.build_directory):
+ make("test")
diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py
index fc5fd0060c..fc94444437 100644
--- a/var/spack/repos/builtin/packages/tau/package.py
+++ b/var/spack/repos/builtin/packages/tau/package.py
@@ -28,6 +28,7 @@ class Tau(Package):
tags = ['e4s']
version('master', branch='master')
+ version('2.31', sha256='27e73c395dd2a42b91591ce4a76b88b1f67663ef13aa19ef4297c68f45d946c2')
version('2.30.2', sha256='43f84a15b71a226f8a64d966f0cb46022bcfbaefb341295ecc6fa80bb82bbfb4')
version('2.30.1', sha256='9c20ca1b4f4e80d885f24491cee598068871f0e9dd67906a5e47e4b4147d08fc')
version('2.30', sha256='e581c33e21488d69839a00d97fd4451ea579f47249b2750d5c36bea773041eaf')
@@ -102,7 +103,8 @@ class Tau(Package):
depends_on('elf', when='+elf')
# TAU requires the ELF header support, libiberty and demangle.
depends_on('binutils@:2.33.1+libiberty+headers+plugins', when='+binutils')
- depends_on('python@2.7:', when='+python')
+ # Build errors with Python 3.9
+ depends_on('python@2.7:3.8', when='+python')
depends_on('libunwind', when='+libunwind')
depends_on('mpi', when='+mpi', type=('build', 'run', 'link'))
depends_on('cuda', when='+cuda')
diff --git a/var/spack/repos/builtin/packages/thepeg/package.py b/var/spack/repos/builtin/packages/thepeg/package.py
index f228f8410f..a0e76d79c3 100644
--- a/var/spack/repos/builtin/packages/thepeg/package.py
+++ b/var/spack/repos/builtin/packages/thepeg/package.py
@@ -99,7 +99,7 @@ class Thepeg(AutotoolsPackage):
if self.spec.satisfies('@2.0.3:'):
args += ['--with-rivet=' + self.spec['rivet'].prefix]
- if self.spec.satisfies('@:2.1'):
+ if self.spec.satisfies('@2.1.1:'):
args += ['--with-boost=' + self.spec['boost'].prefix]
args += ['CFLAGS=-O2', 'CXXFLAGS=-O2', 'FFLAGS=-O2']
diff --git a/var/spack/repos/builtin/packages/tinygltf/package.py b/var/spack/repos/builtin/packages/tinygltf/package.py
new file mode 100644
index 0000000000..7c752db898
--- /dev/null
+++ b/var/spack/repos/builtin/packages/tinygltf/package.py
@@ -0,0 +1,17 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Tinygltf(CMakePackage):
+ """Header only C++11 tiny glTF 2.0 library."""
+
+ homepage = "https://github.com/syoyo/tinygltf"
+ url = "https://github.com/syoyo/tinygltf/archive/refs/tags/v2.5.0.tar.gz"
+
+ version('2.5.0', sha256='5d85bd556b60b1b69527189293cfa4902957d67fabb8582b6532f23a5ef27ec1')
+
+ depends_on('cmake@3.6:', type='build')
diff --git a/var/spack/repos/builtin/packages/tinyobjloader/package.py b/var/spack/repos/builtin/packages/tinyobjloader/package.py
new file mode 100644
index 0000000000..2ce32e29dc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/tinyobjloader/package.py
@@ -0,0 +1,17 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Tinyobjloader(CMakePackage):
+ """Tiny but powerful single file wavefront obj loader."""
+
+ homepage = "https://github.com/tinyobjloader/tinyobjloader"
+ url = "https://github.com/tinyobjloader/tinyobjloader/archive/refs/tags/v1.0.6.tar.gz"
+
+ version('1.0.6', sha256='19ee82cd201761954dd833de551edb570e33b320d6027e0d91455faf7cd4c341')
+
+ depends_on('cmake@2.8.11:', type='build')
diff --git a/var/spack/repos/builtin/packages/tiptop/NR_perf_counter_open_aarch64.patch b/var/spack/repos/builtin/packages/tiptop/NR_perf_counter_open_aarch64.patch
index 7b5aa5a6bb..7b5aa5a6bb 100755..100644
--- a/var/spack/repos/builtin/packages/tiptop/NR_perf_counter_open_aarch64.patch
+++ b/var/spack/repos/builtin/packages/tiptop/NR_perf_counter_open_aarch64.patch
diff --git a/var/spack/repos/builtin/packages/tree-sitter/package.py b/var/spack/repos/builtin/packages/tree-sitter/package.py
new file mode 100644
index 0000000000..c75a0a28ca
--- /dev/null
+++ b/var/spack/repos/builtin/packages/tree-sitter/package.py
@@ -0,0 +1,23 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack import *
+
+
+class TreeSitter(MakefilePackage):
+ """Tree-sitter is a parser generator tool and an incremental parsing library.
+ It can build a concrete syntax tree for a source file and
+ efficiently update the syntax tree as the source file is edited."""
+
+ homepage = "https://tree-sitter.github.io/tree-sitter/"
+ url = "https://github.com/tree-sitter/tree-sitter/archive/refs/tags/v0.20.1.tar.gz"
+
+ maintainers = ['albestro']
+
+ version('0.20.1', sha256='12a3f7206af3028dbe8a0de50d8ebd6d7010bf762db918acae76fc7585f1258d')
+
+ def edit(self, spec, prefix):
+ env['PREFIX'] = prefix
diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py
index ae8cd1a43d..68276b65ca 100644
--- a/var/spack/repos/builtin/packages/trilinos/package.py
+++ b/var/spack/repos/builtin/packages/trilinos/package.py
@@ -295,6 +295,7 @@ class Trilinos(CMakePackage, CudaPackage):
depends_on('hypre~internal-superlu~int64', when='+hypre')
depends_on('kokkos-nvcc-wrapper', when='+wrapper')
depends_on('lapack')
+ # depends_on('perl', type=('build',)) # TriBITS finds but doesn't use...
depends_on('libx11', when='+x11')
depends_on('matio', when='+exodus')
depends_on('metis', when='+zoltan')
@@ -438,20 +439,18 @@ class Trilinos(CMakePackage, CudaPackage):
options.extend([
define('Trilinos_VERBOSE_CONFIGURE', False),
define_from_variant('BUILD_SHARED_LIBS', 'shared'),
- define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
- define_trilinos_enable('TESTS', False),
- define_trilinos_enable('EXAMPLES', False),
+ define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
+ define_trilinos_enable('ALL_OPTIONAL_PACKAGES', False),
+ define_trilinos_enable('ALL_PACKAGES', False),
define_trilinos_enable('CXX11', True),
define_trilinos_enable('DEBUG', 'debug'),
+ define_trilinos_enable('EXAMPLES', False),
+ define_trilinos_enable('SECONDARY_TESTED_CODE', True),
+ define_trilinos_enable('TESTS', False),
define_trilinos_enable('Fortran'),
define_trilinos_enable('OpenMP'),
define_trilinos_enable('EXPLICIT_INSTANTIATION',
'explicit_template_instantiation')
- # The following can cause problems on systems that don't have
- # static libraries available for things like dl and pthreads
- # for example when trying to build static libs
- # define_from_variant('TPL_FIND_SHARED_LIBS', 'shared')
- # define('Trilinos_LINK_SEARCH_START_STATIC', '+shared' not in spec)
])
# ################## Trilinos Packages #####################
@@ -563,7 +562,7 @@ class Trilinos(CMakePackage, CudaPackage):
depspec = spec[spack_name]
libs = depspec.libs
options.extend([
- define(trilinos_name + '_INCLUDE_DIRS', depspec.prefix.include),
+ define(trilinos_name + '_INCLUDE_DIRS', depspec.headers.directories),
define(trilinos_name + '_ROOT', depspec.prefix),
define(trilinos_name + '_LIBRARY_NAMES', libs.names),
define(trilinos_name + '_LIBRARY_DIRS', libs.directories),
@@ -631,10 +630,9 @@ class Trilinos(CMakePackage, CudaPackage):
spec['parmetis'].prefix.lib, spec['metis'].prefix.lib
]),
define('ParMETIS_LIBRARY_NAMES', ['parmetis', 'metis']),
- define('TPL_ParMETIS_INCLUDE_DIRS', [
- spec['parmetis'].prefix.include,
- spec['metis'].prefix.include
- ]),
+ define('TPL_ParMETIS_INCLUDE_DIRS',
+ spec['parmetis'].headers.directories +
+ spec['metis'].headers.directories),
])
if spec.satisfies('^superlu-dist@4.0:'):
diff --git a/var/spack/repos/builtin/packages/umpire/package.py b/var/spack/repos/builtin/packages/umpire/package.py
index b5f51b4e38..129d4680fb 100644
--- a/var/spack/repos/builtin/packages/umpire/package.py
+++ b/var/spack/repos/builtin/packages/umpire/package.py
@@ -73,6 +73,7 @@ class Umpire(CachedCMakePackage, CudaPackage, ROCmPackage):
depends_on('cmake@3.8:', type='build')
depends_on('cmake@3.9:', when='+cuda', type='build')
+ depends_on('cmake@:3.20', when='+rocm', type='build')
depends_on('blt@0.4.1:', type='build', when='@6.0.0:')
depends_on('blt@0.4.0:', type='build', when='@4.1.3:5.0.1')
@@ -104,9 +105,8 @@ class Umpire(CachedCMakePackage, CudaPackage, ROCmPackage):
# https://github.com/LLNL/Umpire/issues/653
# This range looks weird, but it ensures the concretizer looks at it as a
- # range, not as a concrete version, so that it also matches compilers
- # specified as `gcc@10.3.0-identifier`. See #8957.
- conflicts('%gcc@10.3.0:10.3.0.0', when='+cuda')
+ # range, not as a concrete version, so that it also matches 10.3.* versions.
+ conflicts('%gcc@10.3.0:10.3', when='+cuda')
def _get_sys_type(self, spec):
sys_type = spec.architecture
@@ -192,6 +192,7 @@ class Umpire(CachedCMakePackage, CudaPackage, ROCmPackage):
entries.append(cmake_cache_option(
"ENABLE_BENCHMARKS", 'tests=benchmarks' in spec))
entries.append(cmake_cache_option("ENABLE_EXAMPLES", '+examples' in spec))
+ entries.append(cmake_cache_option("ENABLE_DOCS", False))
entries.append(cmake_cache_option("BUILD_SHARED_LIBS", '+shared' in spec))
entries.append(cmake_cache_option("ENABLE_TESTS", 'tests=none' not in spec))
diff --git a/var/spack/repos/builtin/packages/utf8cpp/package.py b/var/spack/repos/builtin/packages/utf8cpp/package.py
new file mode 100644
index 0000000000..eb05c6ab62
--- /dev/null
+++ b/var/spack/repos/builtin/packages/utf8cpp/package.py
@@ -0,0 +1,23 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Utf8cpp(Package):
+ """A simple, portable and lightweight generic library for handling UTF-8
+ encoded strings."""
+
+ homepage = "http://utfcpp.sourceforge.net/"
+
+ version('2.3.4', sha256='3373cebb25d88c662a2b960c4d585daf9ae7b396031ecd786e7bb31b15d010ef')
+
+ def url_for_version(self, version):
+ url = "https://sourceforge.net/projects/utfcpp/files/utf8cpp_2x/Release%20{0}/utf8_v{1}.zip"
+ return url.format(version, version.underscored)
+
+ def install(self, spec, prefix):
+ install_tree('doc', prefix.share.doc)
+ install_tree('source', prefix.include)
diff --git a/var/spack/repos/builtin/packages/utf8proc/package.py b/var/spack/repos/builtin/packages/utf8proc/package.py
index 1ec1f0425d..6a8c4a74a8 100644
--- a/var/spack/repos/builtin/packages/utf8proc/package.py
+++ b/var/spack/repos/builtin/packages/utf8proc/package.py
@@ -13,6 +13,9 @@ class Utf8proc(CMakePackage):
homepage = "https://juliastrings.github.io/utf8proc/"
url = "https://github.com/JuliaStrings/utf8proc/archive/v2.4.0.tar.gz"
+ version('2.6.1', sha256='4c06a9dc4017e8a2438ef80ee371d45868bda2237a98b26554de7a95406b283b')
+ version('2.6.0', sha256='b36ce1534b8035e7febd95c031215ed279ee9d31cf9b464e28b4c688133b22c5')
+ version('2.5.0', sha256='d4e8dfc898cfd062493cb7f42d95d70ccdd3a4cd4d90bec0c71b47cca688f1be')
version('2.4.0', sha256='b2e5d547c1d94762a6d03a7e05cea46092aab68636460ff8648f1295e2cdfbd7')
depends_on('cmake@2.8.12:', type='build')
diff --git a/var/spack/repos/builtin/packages/vecgeom/package.py b/var/spack/repos/builtin/packages/vecgeom/package.py
index a7d48585ea..73122defca 100644
--- a/var/spack/repos/builtin/packages/vecgeom/package.py
+++ b/var/spack/repos/builtin/packages/vecgeom/package.py
@@ -20,6 +20,7 @@ class Vecgeom(CMakePackage, CudaPackage):
maintainers = ['drbenmorgan', 'sethrj']
version('master', branch='master')
+ version('1.1.18', sha256='2780640233a36e0d3c767140417015be1893c1ad695ccc0bd3ee0767bc9fbed8')
version('1.1.17', sha256='2e95429b795311a6986320d785bedcd9dace9f8e7b7f6bd778d23a4ff23e0424')
version('1.1.16', sha256='2fa636993156d9d06750586e8a1ac1701ae2be62dea07964e2369698ae521d02')
version('1.1.15', sha256='0ee9897eb12d8d560dc0c9e56e8fdb78d0111f651a984df24e983da035bd1c70')
diff --git a/var/spack/repos/builtin/packages/vecmem/package.py b/var/spack/repos/builtin/packages/vecmem/package.py
new file mode 100644
index 0000000000..e0a920ffd3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/vecmem/package.py
@@ -0,0 +1,48 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Vecmem(CMakePackage, CudaPackage):
+ """VecMem is a vectorised data model base and helper classes."""
+
+ homepage = "https://github.com/acts-project/vecmem"
+ url = "https://github.com/acts-project/vecmem/archive/refs/tags/v0.5.0.tar.gz"
+ list_url = "https://github.com/acts-project/vecmem/releases"
+
+ maintainers = ['wdconinc', 'HadrienG2']
+
+ version('0.8.0', sha256='a13f7178c940d6bf3386e7e8f5eb158e6435882533bffe888d3c9775eeb2f20e')
+ version('0.7.0', sha256='c00266bc80df8f568103f2874ce349fe8f74fb7e361901d562cce41ab7f9b85c')
+ version('0.6.0', sha256='e6c8262844a5ff7f03df7f849a1e7cf1a68214730ac54c35c14333522ff31723')
+ version('0.5.0', sha256='b9739e8fcdf27fa9ef509743cd8f8f62f871b53b0a63b93f24ea9865c2b00a3a')
+ version('0.4.0', sha256='51dfadc2b97f34530c642abdf86dcb6392e753dd68ef011bac89382dcf8aaad4')
+ version('0.3.0', sha256='4e7851ab46fee925800405c5ae18e99b62644d624d3544277a522a06fb812dbf')
+ version('0.2.0', sha256='33aea135989684e325cb097e455ff0f9d1a9e85ff32f671e3b3ed6cc036176ac')
+ version('0.1.0', sha256='19e24e3262aa113cd4242e7b94e2de34a4b362e78553730a358f64351c6a0a01')
+
+ variant('hip', default=False, description='Build the vecmem::hip library')
+ variant('sycl', default=False, description='Build the vecmem::sycl library')
+
+ depends_on('cmake@3.17:', type='build')
+ depends_on('hip', when='+hip')
+ depends_on('sycl', when='+sycl')
+
+ def cmake_args(self):
+ args = [
+ self.define_from_variant('VECMEM_BUILD_CUDA_LIBRARY', 'cuda'),
+ self.define_from_variant('VECMEM_BUILD_HIP_LIBRARY', 'hip'),
+ self.define_from_variant('VECMEM_BUILD_SYCL_LIBRARY', 'sycl'),
+ self.define('VECMEM_BUILD_TESTING', self.run_tests)
+ ]
+
+ if '+cuda' in self.spec:
+ cuda_arch_list = self.spec.variants['cuda_arch'].value
+ cuda_arch = cuda_arch_list[0]
+ if cuda_arch != 'none':
+ args.append('-DCUDA_FLAGS=-arch=sm_{0}'.format(cuda_arch))
+
+ return args
diff --git a/var/spack/repos/builtin/packages/visit/package.py b/var/spack/repos/builtin/packages/visit/package.py
index 68276b9831..dafa6d43d1 100644
--- a/var/spack/repos/builtin/packages/visit/package.py
+++ b/var/spack/repos/builtin/packages/visit/package.py
@@ -56,7 +56,7 @@ class Visit(CMakePackage):
############################
homepage = "https://wci.llnl.gov/simulation/computer-codes/visit/"
git = "https://github.com/visit-dav/visit.git"
- url = "https://github.com/visit-dav/visit/releases/download/v3.1.1/visit3.1.1.tar.gz"
+ url = "https://github.com/visit-dav/visit/releases/download/v3.2.1/visit3.2.1.tar.gz"
tags = ['radiuss']
@@ -67,6 +67,7 @@ class Visit(CMakePackage):
executables = ['^visit$']
version('develop', branch='develop')
+ version('3.2.1', sha256='779d59564c63f31fcbfeff24b14ddd6ac941b3bb7d671d31765a770d193f02e8')
version('3.1.1', sha256='0b60ac52fd00aff3cf212a310e36e32e13ae3ca0ddd1ea3f54f75e4d9b6c6cf0')
version('3.0.1', sha256='a506d4d83b8973829e68787d8d721199523ce7ec73e7594e93333c214c2c12bd')
version('2.13.3', sha256='cf0b3d2e39e1cd102dd886d3ef6da892733445e362fc28f24d9682012cccf2e5')
@@ -82,7 +83,7 @@ class Visit(CMakePackage):
variant('hdf5', default=True, description='Enable HDF5 file format')
variant('silo', default=True, description='Enable Silo file format')
variant('python', default=True, description='Enable Python support')
- variant('mpi', default=True, description='Enable parallel engine')
+ variant('mpi', default=False, description='Enable parallel engine')
patch('spack-changes-3.1.patch', when="@3.1.0:,develop")
patch('spack-changes-3.0.1.patch', when="@3.0.1")
@@ -177,19 +178,47 @@ class Visit(CMakePackage):
#
# =====================================
- depends_on('cmake@3.0:', type='build')
+ depends_on('cmake@3.14.7', type='build')
# https://github.com/visit-dav/visit/issues/3498
- depends_on('vtk@8.1.0:8.1+opengl2~python', when='~python @3.0:3,develop')
- depends_on('vtk@8.1.0:8.1+opengl2+python', when='+python @3.0:3,develop')
+ # The vtk_compiler_visibility patch fixes a bug where
+ # VTKGenerateExportHeader.cmake fails to recognize gcc versions 10.0
+ # or greater.
+ # The vtk_rendering_opengl2_x11 patch adds include directories to
+ # Rendering/OpenGL2/CMakeLists.txt for systems that don't have the
+ # system X libraries and include files installed.
+ # The vtk_wrapping_python_x11 patch adds include directories to
+ # Wrapping/Python/CMakelists.txt for systems that don't have the
+ # system X libraries and include files installed.
+ depends_on('vtk@8.1.0+opengl2+osmesa~python',
+ patches=[patch('vtk_compiler_visibility.patch'),
+ patch('vtk_rendering_opengl2_x11.patch'),
+ patch('vtk_wrapping_python_x11.patch'),
+ ],
+ when='~python @3.2:,develop')
+ depends_on('vtk@8.1.0+opengl2+osmesa+python',
+ patches=[patch('vtk_compiler_visibility.patch'),
+ patch('vtk_rendering_opengl2_x11.patch'),
+ patch('vtk_wrapping_python_x11.patch'),
+ ],
+ when='+python @3.2:,develop')
depends_on('glu', when='platform=linux')
- depends_on('vtk@6.1.0~opengl2', when='@:2')
- depends_on('vtk+python', when='+python @3.0:,develop')
+ depends_on('vtk+python', when='+python @3.2:,develop')
depends_on('vtk~mpi', when='~mpi')
depends_on('vtk+qt', when='+gui')
- depends_on('qt+gui@4.8.6:4', when='+gui @:2')
- depends_on('qt+gui@5.10:', when='+gui @3.0:,develop')
+ # VisIt doesn't work with later versions of qt.
+ depends_on('qt+gui@5.14.2:', when='+gui @3.2:,develop')
depends_on('qwt', when='+gui')
- depends_on('python@2.6:2.8', when='+python')
+ # python@3.8 doesn't work with VisIt.
+ depends_on('python@3.7', when='+python')
+ # llvm@12.0.1, @11.1.0, @10.0.1 fail in build phase with gcc 6.1.0.
+ # llvm@9.0.1 fails in cmake phase with gcc 6.1.0.
+ # llvm@12.0.1, llvm@8.0.1 fail in build phase with gcc 11.2.0
+ depends_on('llvm@6:', when='^mesa')
+ depends_on('mesa+glx', when='^mesa')
+ depends_on('mesa-glu', when='^mesa')
+ # VisIt doesn't build with hdf5@1.12 and hdf5@1.10 produces files that
+ # are incompatible with hdf5@1.8.
+ depends_on('hdf5@1.8', when='+hdf5')
# VisIt uses Silo's 'ghost zone' data structures, which are only available
# in v4.10+ releases: https://wci.llnl.gov/simulation/computer-codes/silo/releases/release-notes-4.10
depends_on('silo@4.10:+shared', when='+silo')
@@ -200,13 +229,9 @@ class Visit(CMakePackage):
depends_on('mpi', when='+mpi')
depends_on('adios2', when='+adios2')
- conflicts('+adios2', when='@:2')
- conflicts('+hdf5', when='~gui @:2')
- conflicts('+silo', when='~gui @:2')
-
root_cmakelists_dir = 'src'
- @when('@3.0.0:3,develop')
+ @when('@3.0.0:,develop')
def patch(self):
# Some of VTK's targets don't create explicit libraries, so there is no
# 'vtktiff'. Instead, replace with the library variable defined from
@@ -235,6 +260,7 @@ class Visit(CMakePackage):
'-DVISIT_USE_GLEW=OFF',
'-DCMAKE_CXX_FLAGS=' + ' '.join(cxx_flags),
'-DCMAKE_C_FLAGS=' + ' '.join(cc_flags),
+ '-DVISIT_CONFIG_SITE=NONE',
]
# Provide the plugin compilation environment so as to extend VisIt
@@ -266,6 +292,12 @@ class Visit(CMakePackage):
args.append('-DVISIT_SERVER_COMPONENTS_ONLY=ON')
args.append('-DVISIT_ENGINE_ONLY=ON')
+ if '^mesa' in spec:
+ args.append(
+ '-DVISIT_LLVM_DIR:PATH={0}'.format(spec['llvm'].prefix))
+ args.append(
+ '-DVISIT_MESAGL_DIR:PATH={0}'.format(spec['mesa'].prefix))
+
if '+hdf5' in spec:
args.append(
'-DVISIT_HDF5_DIR:PATH={0}'.format(spec['hdf5'].prefix))
diff --git a/var/spack/repos/builtin/packages/visit/vtk_compiler_visibility.patch b/var/spack/repos/builtin/packages/visit/vtk_compiler_visibility.patch
new file mode 100644
index 0000000000..51c153440e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/visit/vtk_compiler_visibility.patch
@@ -0,0 +1,12 @@
+diff -u ./CMake/VTKGenerateExportHeader.cmake.orig ./CMake/VTKGenerateExportHeader.cmake
+--- ./CMake/VTKGenerateExportHeader.cmake.orig 2021-11-03 14:32:17.607243000 -0700
++++ ./CMake/VTKGenerateExportHeader.cmake 2021-11-03 14:35:54.896214000 -0700
+@@ -174,7 +174,7 @@
+ execute_process(COMMAND ${CMAKE_C_COMPILER} --version
+ OUTPUT_VARIABLE _gcc_version_info
+ ERROR_VARIABLE _gcc_version_info)
+- string(REGEX MATCH "[3-9]\\.[0-9]\\.[0-9]*"
++ string(REGEX MATCH "[0-9]+\\.[0-9]+\\.[0-9]*"
+ _gcc_version "${_gcc_version_info}")
+ # gcc on mac just reports: "gcc (GCC) 3.3 20030304 ..." without the
+ # patch level, handle this here:
diff --git a/var/spack/repos/builtin/packages/visit/vtk_rendering_opengl2_x11.patch b/var/spack/repos/builtin/packages/visit/vtk_rendering_opengl2_x11.patch
new file mode 100644
index 0000000000..dd5b2db86d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/visit/vtk_rendering_opengl2_x11.patch
@@ -0,0 +1,12 @@
+diff -u ./Rendering/OpenGL2/CMakeLists.txt.orig ./Rendering/OpenGL2/CMakeLists.txt
+--- ./Rendering/OpenGL2/CMakeLists.txt.orig 2021-11-03 14:33:11.582334000 -0700
++++ ./Rendering/OpenGL2/CMakeLists.txt 2021-11-03 14:36:50.263234000 -0700
+@@ -194,7 +194,7 @@
+ if(NOT X11_Xt_FOUND)
+ message(FATAL_ERROR "X11_Xt_LIB could not be found. Required for VTK X lib.")
+ endif()
+- include_directories(${X11_INCLUDE_DIR} ${X11_Xt_INCLUDE_PATH})
++ include_directories(${X11_INCLUDE_DIR} ${X11_SM_INCLUDE_PATH} ${X11_ICE_INCLUDE_PATH} ${X11_Xt_INCLUDE_PATH})
+ endif()
+
+ # setup factory overrides
diff --git a/var/spack/repos/builtin/packages/visit/vtk_wrapping_python_x11.patch b/var/spack/repos/builtin/packages/visit/vtk_wrapping_python_x11.patch
new file mode 100644
index 0000000000..93ca157281
--- /dev/null
+++ b/var/spack/repos/builtin/packages/visit/vtk_wrapping_python_x11.patch
@@ -0,0 +1,14 @@
+diff -u ./Wrapping/Python/CMakeLists.txt.orig ./Wrapping/Python/CMakeLists.txt
+--- ./Wrapping/Python/CMakeLists.txt.orig 2021-11-03 14:33:41.413950000 -0700
++++ ./Wrapping/Python/CMakeLists.txt 2021-11-03 14:37:03.634738000 -0700
+@@ -23,6 +23,10 @@
+
+ get_property(VTK_PYTHON_MODULES GLOBAL PROPERTY VTK_PYTHON_WRAPPED)
+
++include_directories(${X11_Xlib_INCLUDE_PATH})
++include_directories(${X11_X11_INCLUDE_PATH})
++include_directories(${X11_Xt_INCLUDE_PATH})
++
+ # Get the include directories for the module and all its dependencies.
+ macro(vtk_include_recurse module)
+ _vtk_module_config_recurse("${module}_PYTHON" ${module})
diff --git a/var/spack/repos/builtin/packages/votca-csg-tutorials/package.py b/var/spack/repos/builtin/packages/votca-csg-tutorials/package.py
index d544770e9c..a871a3c396 100644
--- a/var/spack/repos/builtin/packages/votca-csg-tutorials/package.py
+++ b/var/spack/repos/builtin/packages/votca-csg-tutorials/package.py
@@ -20,23 +20,21 @@ class VotcaCsgTutorials(CMakePackage):
git = "https://github.com/votca/csg-tutorials.git"
maintainers = ['junghans']
- version('master', branch='master')
- version('stable', branch='stable')
- version('2021.2', sha256='156c5ec55a288e3013d393e66a1d2f09ebf4f14056d50d081535004696e7f5ba')
- version('2021.1', sha256='5ea1e6ca370e6e7845f9195495f5fb8bbd72d601980e123ae7852f491f03949a')
- version('2021', sha256='2b85c69007bb7d773529020e55fd82fed65651ee21eedccca9a801ab248ece97')
- version('1.6.4', sha256='34ef40db6b178a7f513f8a6f43e7caff6ecb498d66d7bf8bc44900bc7aea31dc')
- version('1.6.3', sha256='709582b978d84f9de09ae6c3ba4ed28daec886d4e0431bc7d19c7246bd65f0b1')
- version('1.6.2', sha256='7c25e76391f3ffdd15f8a91aeed2d3ce7377591f128ed4ae34b36eca20e5af8f')
- version('1.6.1', sha256='d8428c4a03ce42d88317045ec555af3defa022fd9a61f05e07b57c5577288c8c')
- version('1.6', sha256='54946c647724f1beb95942d47ec7f4cf7a95a59ec7268522693d5ec723585daf')
- version('1.5.1', sha256='e35cea92df0e7d05ca7b449c1b5d84d887a3a23c7796abe3b84e4d6feec7faca')
- version('1.5', sha256='03b841fb94129cf59781a7a5e3b71936c414aa9dfa17a50d7bc856d46274580c')
- version('1.4.1', sha256='623724192c3a7d76b603a74a3326f181045f10f38b9f56dce754a90f1a74556e')
- version('1.4', sha256='27d50acd68a9d8557fef18ec2b0c62841ae91c22275ab9afbd65c35e4dd5f719')
+ version('stable', branch='stable', deprecated=True)
+ version('2021.2', sha256='156c5ec55a288e3013d393e66a1d2f09ebf4f14056d50d081535004696e7f5ba', deprecated=True)
+ version('2021.1', sha256='5ea1e6ca370e6e7845f9195495f5fb8bbd72d601980e123ae7852f491f03949a', deprecated=True)
+ version('2021', sha256='2b85c69007bb7d773529020e55fd82fed65651ee21eedccca9a801ab248ece97', deprecated=True)
+ version('1.6.4', sha256='34ef40db6b178a7f513f8a6f43e7caff6ecb498d66d7bf8bc44900bc7aea31dc', deprecated=True)
+ version('1.6.3', sha256='709582b978d84f9de09ae6c3ba4ed28daec886d4e0431bc7d19c7246bd65f0b1', deprecated=True)
+ version('1.6.2', sha256='7c25e76391f3ffdd15f8a91aeed2d3ce7377591f128ed4ae34b36eca20e5af8f', deprecated=True)
+ version('1.6.1', sha256='d8428c4a03ce42d88317045ec555af3defa022fd9a61f05e07b57c5577288c8c', deprecated=True)
+ version('1.6', sha256='54946c647724f1beb95942d47ec7f4cf7a95a59ec7268522693d5ec723585daf', deprecated=True)
+ version('1.5.1', sha256='e35cea92df0e7d05ca7b449c1b5d84d887a3a23c7796abe3b84e4d6feec7faca', deprecated=True)
+ version('1.5', sha256='03b841fb94129cf59781a7a5e3b71936c414aa9dfa17a50d7bc856d46274580c', deprecated=True)
+ version('1.4.1', sha256='623724192c3a7d76b603a74a3326f181045f10f38b9f56dce754a90f1a74556e', deprecated=True)
+ version('1.4', sha256='27d50acd68a9d8557fef18ec2b0c62841ae91c22275ab9afbd65c35e4dd5f719', deprecated=True)
for v in ["1.4", "1.4.1", "1.5", "1.5.1", "1.6", "1.6.1", "1.6.2",
- "1.6.3", "1.6.4", "2021", "2021.1", "2021.2", "master",
- "stable"]:
+ "1.6.3", "1.6.4", "2021", "2021.1", "2021.2", "stable"]:
depends_on('votca-csg@%s' % v, when="@%s:%s.0" % (v, v))
depends_on("boost")
diff --git a/var/spack/repos/builtin/packages/votca-csg/package.py b/var/spack/repos/builtin/packages/votca-csg/package.py
index 986d064e87..83ce2cedb2 100644
--- a/var/spack/repos/builtin/packages/votca-csg/package.py
+++ b/var/spack/repos/builtin/packages/votca-csg/package.py
@@ -20,25 +20,23 @@ class VotcaCsg(CMakePackage):
git = "https://github.com/votca/csg.git"
maintainers = ['junghans']
- version('master', branch='master')
- version('stable', branch='stable')
- version('2021.2', sha256='4c58ea90cc1b7fe95f7bc00634faadba945316417e741192d715cea6aa83f4ac')
- version('2021.1', sha256='1e9cf90ddd7539e711e795292b721a4ee130a2089e659fa068a12960b77fff14')
- version('2021', sha256='d66c9b30ce2a56d630d5db281444447d398be643005ebea70d3735fb60357305')
- version('1.6.4', sha256='eae771b623f3c3edb09744030d053f10c75d64bad919df26c4f9bf3bfaa1cf86')
- version('1.6.3', sha256='35456b1f3116364b10ada37d99798294bd2d3df2e670cef3936251f88036ef88')
- version('1.6.2', sha256='96b244b282005259832ed6ec0dc22dafe132dcfc3d73dcd8e53b62f40befb545')
- version('1.6.1', sha256='ed12bcb1ccdf71f54e21cdcc9803add4b8ebdc6b8263cb5b0034f5db01e31dbb')
- version('1.6', sha256='8cf6a4ac3ef7347c720a44d8a676f8cbd1462e162f6113de39f27b89354465ea')
- version('1.5.1', sha256='7fca1261bd267bf38d2edd26259730fed3126c0c3fd91fb81940dbe17bb568fd')
- version('1.5', sha256='160387cdc51f87dd20ff2e2eed97086beee415d48f3c92f4199f6109068c8ff4')
- version('1.4.1', sha256='41dccaecadd0165c011bec36a113629e27745a5a133d1a042efe4356acdb5450')
- version('1.4', sha256='c13e7febd792de8c3d426203f089bd4d33b8067f9db5e8840e4579c88b61146e')
+ version('stable', branch='stable', deprecated=True)
+ version('2021.2', sha256='4c58ea90cc1b7fe95f7bc00634faadba945316417e741192d715cea6aa83f4ac', deprecated=True)
+ version('2021.1', sha256='1e9cf90ddd7539e711e795292b721a4ee130a2089e659fa068a12960b77fff14', deprecated=True)
+ version('2021', sha256='d66c9b30ce2a56d630d5db281444447d398be643005ebea70d3735fb60357305', deprecated=True)
+ version('1.6.4', sha256='eae771b623f3c3edb09744030d053f10c75d64bad919df26c4f9bf3bfaa1cf86', deprecated=True)
+ version('1.6.3', sha256='35456b1f3116364b10ada37d99798294bd2d3df2e670cef3936251f88036ef88', deprecated=True)
+ version('1.6.2', sha256='96b244b282005259832ed6ec0dc22dafe132dcfc3d73dcd8e53b62f40befb545', deprecated=True)
+ version('1.6.1', sha256='ed12bcb1ccdf71f54e21cdcc9803add4b8ebdc6b8263cb5b0034f5db01e31dbb', deprecated=True)
+ version('1.6', sha256='8cf6a4ac3ef7347c720a44d8a676f8cbd1462e162f6113de39f27b89354465ea', deprecated=True)
+ version('1.5.1', sha256='7fca1261bd267bf38d2edd26259730fed3126c0c3fd91fb81940dbe17bb568fd', deprecated=True)
+ version('1.5', sha256='160387cdc51f87dd20ff2e2eed97086beee415d48f3c92f4199f6109068c8ff4', deprecated=True)
+ version('1.4.1', sha256='41dccaecadd0165c011bec36a113629e27745a5a133d1a042efe4356acdb5450', deprecated=True)
+ version('1.4', sha256='c13e7febd792de8c3d426203f089bd4d33b8067f9db5e8840e4579c88b61146e', deprecated=True)
depends_on("cmake@2.8:", type='build')
for v in ["1.4", "1.4.1", "1.5", "1.5.1", "1.6", "1.6.1", "1.6.2",
- "1.6.3", "1.6.4", "2021", "2021.1", "2021.2", "master",
- "stable"]:
+ "1.6.3", "1.6.4", "2021", "2021.1", "2021.2", "stable"]:
depends_on('votca-tools@%s' % v, when="@%s:%s.0" % (v, v))
depends_on("boost")
depends_on("gromacs~mpi@5.1:2019")
diff --git a/var/spack/repos/builtin/packages/votca-csgapps/package.py b/var/spack/repos/builtin/packages/votca-csgapps/package.py
index 8b90e7ba8a..c2978d9e62 100644
--- a/var/spack/repos/builtin/packages/votca-csgapps/package.py
+++ b/var/spack/repos/builtin/packages/votca-csgapps/package.py
@@ -20,15 +20,15 @@ class VotcaCsgapps(CMakePackage):
git = "https://github.com/votca/csgapps.git"
maintainers = ['junghans']
- version('1.6.4', sha256='ef3d6fbc7f2ff2f29af7d170a5351ae3c37f52ca4c2b1697b1d2e30c26ff4eb1')
- version('1.6.3', sha256='fdb6a94eabdfe1bfae6002da16e364086d036c2dc24700a941b73d5bb1afc422')
- version('1.6.2', sha256='f7db0bda27d4419c570f44dc60d04b1fd7b4cdcf10db6301005fca70111fcfe3')
- version('1.6.1', sha256='03c7cef2a76e73cf953b2b5ea2cdca765ec1a2627d0a9d8869d46166e63d197c')
- version('1.6', sha256='084bbc5b179bb7eb8f6671d2d5fa13e69e68946570c9120a7e4b10aff1866e2e')
- version('1.5.1', sha256='b4946711e88a1745688b6cce5aad872e6e2ea200fededf38d77a864883e3750e')
- version('1.5', sha256='18b40ce6222509bc70aa9d56b8c538cd5903edf7294d6f95530668e555206d5b')
- version('1.4.1', sha256='095d9ee4cd49d2fd79c10e0e84e6890b755e54dec6a5cd580a2b4241ba230a2b')
- version('1.4', sha256='4ea8348c2f7de3cc488f48fbd8652e69b52515441952766c06ff67ed1aaf69a0')
+ version('1.6.4', sha256='ef3d6fbc7f2ff2f29af7d170a5351ae3c37f52ca4c2b1697b1d2e30c26ff4eb1', deprecated=True)
+ version('1.6.3', sha256='fdb6a94eabdfe1bfae6002da16e364086d036c2dc24700a941b73d5bb1afc422', deprecated=True)
+ version('1.6.2', sha256='f7db0bda27d4419c570f44dc60d04b1fd7b4cdcf10db6301005fca70111fcfe3', deprecated=True)
+ version('1.6.1', sha256='03c7cef2a76e73cf953b2b5ea2cdca765ec1a2627d0a9d8869d46166e63d197c', deprecated=True)
+ version('1.6', sha256='084bbc5b179bb7eb8f6671d2d5fa13e69e68946570c9120a7e4b10aff1866e2e', deprecated=True)
+ version('1.5.1', sha256='b4946711e88a1745688b6cce5aad872e6e2ea200fededf38d77a864883e3750e', deprecated=True)
+ version('1.5', sha256='18b40ce6222509bc70aa9d56b8c538cd5903edf7294d6f95530668e555206d5b', deprecated=True)
+ version('1.4.1', sha256='095d9ee4cd49d2fd79c10e0e84e6890b755e54dec6a5cd580a2b4241ba230a2b', deprecated=True)
+ version('1.4', sha256='4ea8348c2f7de3cc488f48fbd8652e69b52515441952766c06ff67ed1aaf69a0', deprecated=True)
for v in ["1.4", "1.4.1", "1.5", "1.5.1", "1.6", "1.6.1", "1.6.2",
"1.6.3", "1.6.4"]:
diff --git a/var/spack/repos/builtin/packages/votca-ctp/package.py b/var/spack/repos/builtin/packages/votca-ctp/package.py
index 74c2819810..d3c7db41a7 100644
--- a/var/spack/repos/builtin/packages/votca-ctp/package.py
+++ b/var/spack/repos/builtin/packages/votca-ctp/package.py
@@ -19,9 +19,8 @@ class VotcaCtp(CMakePackage):
url = "https://github.com/votca/ctp/tarball/v1.5"
git = "https://github.com/votca/ctp.git"
- version('master', branch='master')
- version('1.5.1', sha256='ef957c2f6b09335d0d27ecb7e1b80b55e76a100247bc0d0b3cfef7718d2a1126')
- version('1.5', sha256='31eb6bcc9339e575116f0c91fe7a4ce7d4189f31f0640329c993fea911401d65')
+ version('1.5.1', sha256='ef957c2f6b09335d0d27ecb7e1b80b55e76a100247bc0d0b3cfef7718d2a1126', deprecated=True)
+ version('1.5', sha256='31eb6bcc9339e575116f0c91fe7a4ce7d4189f31f0640329c993fea911401d65', deprecated=True)
depends_on("cmake@2.8:", type='build')
depends_on('votca-tools@1.5.1')
diff --git a/var/spack/repos/builtin/packages/votca-tools/package.py b/var/spack/repos/builtin/packages/votca-tools/package.py
index 0c72242b0c..a253b9857d 100644
--- a/var/spack/repos/builtin/packages/votca-tools/package.py
+++ b/var/spack/repos/builtin/packages/votca-tools/package.py
@@ -20,20 +20,19 @@ class VotcaTools(CMakePackage):
git = "https://github.com/votca/tools.git"
maintainers = ['junghans']
- version('master', branch='master')
- version('stable', branch='stable')
- version('2021.2', sha256='2cd3175b65924803aff90dce49f60e1dda9015988a453d60358e51f0dbb4292d')
- version('2021.1', sha256='c2fdf5ab72fc75580fb3623182fa88dd0eed856388bdc862aff42148bb0a16e7')
- version('2021', sha256='b84f68ba4a8bfae7b06b61e1e078dcbfb3b340c516da3be39ef545152da00ccd')
- version('1.6.4', sha256='aa79ef4617a80ba3ca063932d5ee0d5767c0285b4b613abd373ad3c986ab9f4c')
- version('1.6.3', sha256='b4ba63861f4342070d81309992f76c4cc798dffeab894bff64799881e75b3cc2')
- version('1.6.2', sha256='1b31e0dd7550b80b963e6714d671f3516d68ebc1e75068a5d827a6e8b4f1759a')
- version('1.6.1', sha256='3e8f51d484cb3fdfbeb851aab387807ba4c40aecef8317c90182da68ad282dcc')
- version('1.6', sha256='cfd0fedc80fecd009f743b5df47777508d76bf3ef294a508a9f11fbb42efe9a5')
- version('1.5.1', sha256='4be4fe25a2910e24e1720cd9022d214001d38196033ade8f9d6e618b4f47d5c4')
- version('1.5', sha256='a82a6596c24ff06e79eab17ca02f4405745ceeeb66369693a59023ad0b62cf22')
- version('1.4.1', sha256='b6b87f6bec8db641a1d8660422ca44919252a69494b32ba6c8c9ac986bae9a65')
- version('1.4', sha256='41638122e7e59852af61d391b4ab8c308fd2e16652f768077e13a99d206ec5d3')
+ version('stable', branch='stable', deprecated=True)
+ version('2021.2', sha256='2cd3175b65924803aff90dce49f60e1dda9015988a453d60358e51f0dbb4292d', deprecated=True)
+ version('2021.1', sha256='c2fdf5ab72fc75580fb3623182fa88dd0eed856388bdc862aff42148bb0a16e7', deprecated=True)
+ version('2021', sha256='b84f68ba4a8bfae7b06b61e1e078dcbfb3b340c516da3be39ef545152da00ccd', deprecated=True)
+ version('1.6.4', sha256='aa79ef4617a80ba3ca063932d5ee0d5767c0285b4b613abd373ad3c986ab9f4c', deprecated=True)
+ version('1.6.3', sha256='b4ba63861f4342070d81309992f76c4cc798dffeab894bff64799881e75b3cc2', deprecated=True)
+ version('1.6.2', sha256='1b31e0dd7550b80b963e6714d671f3516d68ebc1e75068a5d827a6e8b4f1759a', deprecated=True)
+ version('1.6.1', sha256='3e8f51d484cb3fdfbeb851aab387807ba4c40aecef8317c90182da68ad282dcc', deprecated=True)
+ version('1.6', sha256='cfd0fedc80fecd009f743b5df47777508d76bf3ef294a508a9f11fbb42efe9a5', deprecated=True)
+ version('1.5.1', sha256='4be4fe25a2910e24e1720cd9022d214001d38196033ade8f9d6e618b4f47d5c4', deprecated=True)
+ version('1.5', sha256='a82a6596c24ff06e79eab17ca02f4405745ceeeb66369693a59023ad0b62cf22', deprecated=True)
+ version('1.4.1', sha256='b6b87f6bec8db641a1d8660422ca44919252a69494b32ba6c8c9ac986bae9a65', deprecated=True)
+ version('1.4', sha256='41638122e7e59852af61d391b4ab8c308fd2e16652f768077e13a99d206ec5d3', deprecated=True)
# https://github.com/votca/tools/pull/229, fix mkl in exported target
patch("https://github.com/votca/tools/pull/229.patch", sha256="250d0b679e5d3104e3c8d6adf99751b71386c7ed4cbdae1c75408717ef3f401f", when="@1.6:1.6.0+mkl")
diff --git a/var/spack/repos/builtin/packages/votca-xtp/package.py b/var/spack/repos/builtin/packages/votca-xtp/package.py
index c9b24f784f..141afdb44c 100644
--- a/var/spack/repos/builtin/packages/votca-xtp/package.py
+++ b/var/spack/repos/builtin/packages/votca-xtp/package.py
@@ -20,24 +20,23 @@ class VotcaXtp(CMakePackage):
git = "https://github.com/votca/xtp.git"
maintainers = ['junghans']
- version('master', branch='master')
- version('stable', branch='stable')
- version('2021.2', sha256='a13180cc05a24c441326a2b209e4d1cc6b176f1b8d7aec1aea46b627e230ff8c')
- version('2021.1', sha256='8ce112fc40676690369133188848dfeb3875d57351286cad4c312057a4dd767b')
- version('2021', sha256='43bb5a52fec675738f4b5896f0833a1c1090bd7e74f97769697495abf4652e40')
- version('1.6.4', sha256='699a835954556cf6b2f20dac7942c1761c6dd6c6c3fbdde62c8bfcfd71ee075b')
- version('1.6.3', sha256='757b9a6a470b3c356f638d62269c5b72b8ace374f006658aef8bb6afd1ad1413')
- version('1.6.2', sha256='b51a28cddceca6998b981ad61466617ad624d577ce424c0653d92a680f460061')
- version('1.6.1', sha256='886af50bc12457bbafb06dc927b7fd4cadc3db1b4615b24a08953f6b358debef')
- version('1.6', sha256='695c2d9d3f924103481529f992e3723bdce10b8edfc294421a849cdf51dbbb6e')
- version('1.5.1', sha256='17a7722e5a32d236e4f1f6f88b680da4ba5f52bcf65bca3687c6a1c731d10881')
- version('1.5', sha256='b40b6d19e13f0650e84b8beebe86ce5c09071624f18d66df826f9d8584b4d3c8')
- version('1.4.1', sha256='4b53d371d6cf648c9e9e9bd1f104d349cafeaf10a02866e3f1d05c574b595a21')
+ version('stable', branch='stable', deprecated=True)
+ version('2021.2', sha256='a13180cc05a24c441326a2b209e4d1cc6b176f1b8d7aec1aea46b627e230ff8c', deprecated=True)
+ version('2021.1', sha256='8ce112fc40676690369133188848dfeb3875d57351286cad4c312057a4dd767b', deprecated=True)
+ version('2021', sha256='43bb5a52fec675738f4b5896f0833a1c1090bd7e74f97769697495abf4652e40', deprecated=True)
+ version('1.6.4', sha256='699a835954556cf6b2f20dac7942c1761c6dd6c6c3fbdde62c8bfcfd71ee075b', deprecated=True)
+ version('1.6.3', sha256='757b9a6a470b3c356f638d62269c5b72b8ace374f006658aef8bb6afd1ad1413', deprecated=True)
+ version('1.6.2', sha256='b51a28cddceca6998b981ad61466617ad624d577ce424c0653d92a680f460061', deprecated=True)
+ version('1.6.1', sha256='886af50bc12457bbafb06dc927b7fd4cadc3db1b4615b24a08953f6b358debef', deprecated=True)
+ version('1.6', sha256='695c2d9d3f924103481529f992e3723bdce10b8edfc294421a849cdf51dbbb6e', deprecated=True)
+ version('1.5.1', sha256='17a7722e5a32d236e4f1f6f88b680da4ba5f52bcf65bca3687c6a1c731d10881', deprecated=True)
+ version('1.5', sha256='b40b6d19e13f0650e84b8beebe86ce5c09071624f18d66df826f9d8584b4d3c8', deprecated=True)
+ version('1.4.1', sha256='4b53d371d6cf648c9e9e9bd1f104d349cafeaf10a02866e3f1d05c574b595a21', deprecated=True)
depends_on("cmake@2.8:", type='build')
for v in ["1.4.1", "1.5", "1.5.1", "1.6", "1.6.1", "1.6.2",
"1.6.3", "1.6.4", "2021", "2021.1", "2021.2",
- "master", "stable"]:
+ "stable"]:
depends_on('votca-tools@%s' % v, when="@%s:%s.0" % (v, v))
depends_on('votca-csg@%s' % v, when="@%s:%s.0" % (v, v))
depends_on("libxc", when='@stable,1.5:')
diff --git a/var/spack/repos/builtin/packages/votca/package.py b/var/spack/repos/builtin/packages/votca/package.py
new file mode 100644
index 0000000000..647a5ed13a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/votca/package.py
@@ -0,0 +1,64 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack import *
+
+
+class Votca(CMakePackage):
+ """VOTCA is a software package which focuses on the analysis of molecular
+ dynamics data, the development of systematic coarse-graining techniques
+ as well as methods used for simulating microscopic charge (and exciton)
+ transport in disordered semiconductors.
+ """
+ homepage = "https://www.votca.org"
+ url = "https://github.com/votca/votca/tarball/v2022-rc.1"
+    git      = "https://github.com/votca/votca.git"
+ maintainers = ['junghans']
+
+ version('master', branch='master')
+ version('stable', branch='stable')
+ version('2022-rc.2', sha256='eefde51470ec1437d0127fb02c2745f33e434deff53cdaee97691c36ce447fb1')
+ version('2022-rc.1', sha256='d53ca9fde364a97d91bf3bed15223536ffa598b2dec7bccd459accae265391b1')
+
+ variant('mkl', default=False, description='Build with MKL support')
+ variant('new-gmx', default=False, description='Build against gromacs>2019 - no tabulated kernels')
+ conflicts('votca-tools')
+ conflicts('votca-csg')
+ conflicts('votca-xtp')
+
+ depends_on("cmake@3.13:", type='build')
+ depends_on("expat")
+ depends_on("fftw-api@3")
+ depends_on("eigen@3.3:")
+ depends_on("boost")
+ depends_on('mkl', when='+mkl')
+ depends_on("libxc")
+ depends_on("hdf5+cxx~mpi")
+ depends_on("libint@2.6.0:")
+ depends_on("libecpint")
+ depends_on("py-h5py")
+ depends_on("py-lxml")
+ depends_on("gromacs~mpi@5.1:")
+ depends_on("gromacs~mpi@5.1:2019", when="~new-gmx")
+ depends_on('lammps', type='test')
+ depends_on('py-espresso', type='test')
+ depends_on('py-pytest', type='test')
+
+ def cmake_args(self):
+ args = [
+ '-DINSTALL_RC_FILES=OFF',
+ '-DBUILD_XTP=ON',
+ '-DBUILD_CSGAPPS=ON',
+ ]
+
+ if '~mkl' in self.spec:
+ args.append('-DCMAKE_DISABLE_FIND_PACKAGE_MKL=ON')
+
+ if self.run_tests:
+ args.append('-DENABLE_TESTING=ON')
+ args.append('-DENABLE_REGRESSION_TESTING=ON')
+
+ return args
diff --git a/var/spack/repos/builtin/packages/vtk-h/package.py b/var/spack/repos/builtin/packages/vtk-h/package.py
index 278a6cb373..e876810637 100644
--- a/var/spack/repos/builtin/packages/vtk-h/package.py
+++ b/var/spack/repos/builtin/packages/vtk-h/package.py
@@ -76,8 +76,9 @@ class VtkH(Package, CudaPackage):
depends_on("vtk-m~tbb+openmp", when="+openmp")
depends_on("vtk-m~tbb~openmp", when="~openmp")
- depends_on("vtk-m+cuda~tbb+openmp", when="+cuda+openmp")
- depends_on("vtk-m+cuda~tbb~openmp", when="+cuda~openmp")
+ for _arch in CudaPackage.cuda_arch_values:
+ depends_on("vtk-m+cuda~tbb+openmp cuda_arch={0}".format(_arch), when="+cuda+openmp cuda_arch={0}".format(_arch))
+ depends_on("vtk-m+cuda~tbb~openmp cuda_arch={0}".format(_arch), when="+cuda~openmp cuda_arch={0}".format(_arch))
depends_on("vtk-m~tbb+openmp~shared", when="+openmp~shared")
depends_on("vtk-m~tbb~openmp~shared", when="~openmp~shared")
diff --git a/var/spack/repos/builtin/packages/vtk-m/package.py b/var/spack/repos/builtin/packages/vtk-m/package.py
index 5eb7a68cdb..ca914fd662 100644
--- a/var/spack/repos/builtin/packages/vtk-m/package.py
+++ b/var/spack/repos/builtin/packages/vtk-m/package.py
@@ -89,6 +89,9 @@ class VtkM(CMakePackage, CudaPackage):
conflicts("+hip", when="+cuda")
+ conflicts("+cuda", when="cuda_arch=none",
+ msg="vtk-m +cuda requires that cuda_arch be set")
+
def cmake_args(self):
spec = self.spec
options = []
diff --git a/var/spack/repos/builtin/packages/vtk/internal_findHDF5.patch b/var/spack/repos/builtin/packages/vtk/internal_findHDF5.patch
new file mode 100644
index 0000000000..82775aefbe
--- /dev/null
+++ b/var/spack/repos/builtin/packages/vtk/internal_findHDF5.patch
@@ -0,0 +1,16 @@
+diff -ru a/CMake/FindHDF5.cmake b/CMake/FindHDF5.cmake
+--- a/CMake/FindHDF5.cmake 2019-01-30 11:15:13.000000000 -0600
++++ b/CMake/FindHDF5.cmake 2021-11-15 18:01:39.314264619 -0600
+@@ -3,11 +3,7 @@
+ # (BUG #0014363).
+
+ # include the default FindHDF5.cmake.
+-if(CMAKE_VERSION VERSION_LESS 3.6.1)
+- include(${CMAKE_CURRENT_LIST_DIR}/NewCMake/FindHDF5.cmake)
+-else()
+- include(${CMAKE_ROOT}/Modules/FindHDF5.cmake)
+-endif()
++include(${CMAKE_CURRENT_LIST_DIR}/NewCMake/FindHDF5.cmake)
+
+ if(HDF5_FOUND AND (HDF5_IS_PARALLEL OR HDF5_ENABLE_PARALLEL))
+ include(vtkMPI)
diff --git a/var/spack/repos/builtin/packages/vtk/package.py b/var/spack/repos/builtin/packages/vtk/package.py
index 49de4e28b3..07c4fc2ad9 100644
--- a/var/spack/repos/builtin/packages/vtk/package.py
+++ b/var/spack/repos/builtin/packages/vtk/package.py
@@ -21,7 +21,8 @@ class Vtk(CMakePackage):
maintainers = ['chuckatkins', 'danlipsa']
- version('9.0.3', sha256='bc3eb9625b2b8dbfecb6052a2ab091fc91405de4333b0ec68f3323815154ed8a')
+ version('9.1.0', sha256='8fed42f4f8f1eb8083107b68eaa9ad71da07110161a3116ad807f43e5ca5ce96')
+ version('9.0.3', sha256='bc3eb9625b2b8dbfecb6052a2ab091fc91405de4333b0ec68f3323815154ed8a', preferred=True)
version('9.0.1', sha256='1b39a5e191c282861e7af4101eaa8585969a2de05f5646c9199a161213a622c7')
version('9.0.0', sha256='15def4e6f84d72f82386617fe595ec124dda3cbd13ea19a0dcd91583197d8715')
version('8.2.0', sha256='34c3dc775261be5e45a8049155f7228b6bd668106c72a3c435d95730d17d57bb')
@@ -73,6 +74,9 @@ class Vtk(CMakePackage):
# Broken downstream FindMPI
patch('vtkm-findmpi-downstream.patch', when='@9.0.0')
+ # use internal FindHDF5
+ patch('internal_findHDF5.patch', when='@:8')
+
# The use of the OpenGL2 backend requires at least OpenGL Core Profile
# version 3.2 or higher.
depends_on('gl@3.2:', when='+opengl2')
@@ -115,6 +119,18 @@ class Vtk(CMakePackage):
depends_on('eigen', when='@8.2.0:')
depends_on('double-conversion', when='@8.2.0:')
depends_on('sqlite', when='@8.2.0:')
+ depends_on('pugixml', when='@9:')
+ depends_on('libogg')
+ depends_on('libtheora')
+ depends_on('utf8cpp', when='@9:')
+ depends_on('gl2ps', when='@8.1:')
+ depends_on('gl2ps@1.4.1:', when='@9:')
+ depends_on('proj@4', when='@8.2')
+ depends_on('proj@4:7', when='@9:')
+ depends_on('cgns@4.1.1:+mpi', when='@9.1: +mpi')
+ depends_on('cgns@4.1.1:~mpi', when='@9.1: ~mpi')
+ depends_on('seacas@2021-05-12:+mpi', when='@9.1: +mpi')
+ depends_on('seacas@2021-05-12:~mpi', when='@9.1: ~mpi')
# For finding Fujitsu-MPI wrapper commands
patch('find_fujitsu_mpi.patch', when='@:8.2.0%fj')
@@ -147,38 +163,45 @@ class Vtk(CMakePackage):
# (solves #26314)
'-DCMAKE_INSTALL_LIBDIR:PATH=lib',
- # In general, we disable use of VTK "ThirdParty" libs, preferring
- # spack-built versions whenever possible
- '-DVTK_USE_SYSTEM_LIBRARIES:BOOL=ON',
-
- # However, in a few cases we can't do without them yet
- '-DVTK_USE_SYSTEM_GL2PS:BOOL=OFF',
- '-DVTK_USE_SYSTEM_LIBHARU=OFF',
-
- '-DNETCDF_DIR={0}'.format(spec['netcdf-c'].prefix),
- '-DNETCDF_C_ROOT={0}'.format(spec['netcdf-c'].prefix),
- '-DNETCDF_CXX_ROOT={0}'.format(spec['netcdf-cxx'].prefix),
-
# Allow downstream codes (e.g. VisIt) to override VTK's classes
'-DVTK_ALL_NEW_OBJECT_FACTORY:BOOL=ON',
-
- # Disable wrappers for other languages.
- '-DVTK_WRAP_JAVA=OFF',
- '-DVTK_WRAP_TCL=OFF',
]
- # Some variable names have changed
- if spec.satisfies('@8.2.0:'):
+ # Disable wrappers for other languages.
+ cmake_args.append('-DVTK_WRAP_JAVA=OFF')
+ if spec.satisfies('@:8.1'):
+ cmake_args.append('-DVTK_WRAP_TCL=OFF')
+
+ # In general, we disable use of VTK "ThirdParty" libs, preferring
+ # spack-built versions whenever possible but there are exceptions.
+ if spec.satisfies('@:8'):
cmake_args.extend([
- '-DVTK_USE_SYSTEM_OGG:BOOL=OFF',
- '-DVTK_USE_SYSTEM_THEORA:BOOL=OFF',
- '-DVTK_USE_SYSTEM_LIBPROJ:BOOL=OFF',
- '-DVTK_USE_SYSTEM_PUGIXML:BOOL=OFF',
+ '-DVTK_USE_SYSTEM_LIBRARIES:BOOL=ON',
+ '-DVTK_USE_SYSTEM_LIBHARU=OFF',
])
+ if spec.satisfies('@:8.0'):
+ cmake_args.append('-DVTK_USE_SYSTEM_GL2PS=OFF')
else:
cmake_args.extend([
- '-DVTK_USE_SYSTEM_OGGTHEORA:BOOL=OFF',
+ '-DVTK_USE_EXTERNAL:BOOL=ON',
+ '-DVTK_MODULE_USE_EXTERNAL_VTK_libharu:BOOL=OFF',
+ '-DVTK_MODULE_USE_EXTERNAL_VTK_pegtl:BOOL=OFF',
+ '-DHDF5_ROOT={0}'.format(spec['hdf5'].prefix),
+ ])
+ if spec.satisfies('@9.1:'):
+ cmake_args.extend([
+ '-DVTK_MODULE_USE_EXTERNAL_VTK_exprtk:BOOL=OFF',
+ # uses an unreleased version of fmt
+ '-DVTK_MODULE_USE_EXTERNAL_VTK_fmt:BOOL=OFF',
+ ])
+
+ # Some variable names have changed
+ if spec.satisfies('@8.2.0'):
+ cmake_args.append('-DVTK_USE_SYSTEM_PUGIXML:BOOL=OFF')
+ elif spec.satisfies('@:8.1'):
+ cmake_args.extend([
'-DVTK_USE_SYSTEM_LIBPROJ4:BOOL=OFF',
+ '-DNETCDF_CXX_ROOT={0}'.format(spec['netcdf-cxx'].prefix),
])
if '+mpi' in spec:
@@ -191,17 +214,22 @@ class Vtk(CMakePackage):
cmake_args.extend([
'-DVTK_USE_MPI=ON'
])
+        else:
+            cmake_args.append('-DVTK_USE_MPI=OFF')
if '+ffmpeg' in spec:
- cmake_args.extend(['-DModule_vtkIOFFMPEG:BOOL=ON'])
+ if spec.satisfies('@:8'):
+ cmake_args.append('-DModule_vtkIOFFMPEG:BOOL=ON')
+ else:
+ cmake_args.append('-DVTK_MODULE_ENABLE_VTK_IOFFMPEG:STRING=YES')
# Enable/Disable wrappers for Python.
if '+python' in spec:
- cmake_args.extend([
- '-DVTK_WRAP_PYTHON=ON',
- '-DPYTHON_EXECUTABLE={0}'.format(spec['python'].command.path),
- ])
- if '+mpi' in spec:
+ cmake_args.append('-DVTK_WRAP_PYTHON=ON')
+ if spec.satisfies('@:8'):
+ cmake_args.append(
+ '-DPYTHON_EXECUTABLE={0}'.format(spec['python'].command.path))
+ if '+mpi' in spec and spec.satisfies('@:8'):
cmake_args.append('-DVTK_USE_SYSTEM_MPI4PY:BOOL=ON')
if spec.satisfies('@9.0.0: ^python@3:'):
cmake_args.append('-DVTK_PYTHON_VERSION=3')
@@ -218,14 +246,16 @@ class Vtk(CMakePackage):
qt_bin = spec['qt'].prefix.bin
qmake_exe = os.path.join(qt_bin, 'qmake')
- cmake_args.extend([
- # Enable Qt support here.
- '-DVTK_QT_VERSION:STRING={0}'.format(qt_ver),
- '-DQT_QMAKE_EXECUTABLE:PATH={0}'.format(qmake_exe),
- '-DVTK_Group_Qt:BOOL=ON',
- ])
# https://github.com/martijnkoopman/Qt-VTK-viewer/blob/master/doc/Build-VTK.md
- if spec.satisfies('@9.0.0:'):
+ # The content of the above link changes over time with versions.
+ # Older commits have information on VTK-8.
+ if spec.satisfies('@:8'):
+ cmake_args.extend([
+ '-DVTK_QT_VERSION:STRING={0}'.format(qt_ver),
+ '-DQT_QMAKE_EXECUTABLE:PATH={0}'.format(qmake_exe),
+ '-DVTK_Group_Qt:BOOL=ON',
+ ])
+ else:
cmake_args.extend([
'-DVTK_GROUP_ENABLE_Qt:STRING=YES',
'-DVTK_MODULE_ENABLE_VTK_GUISupportQt:STRING=YES',
@@ -235,37 +265,54 @@ class Vtk(CMakePackage):
# VTK to build with qt~webkit versions (see the documentation for
# more info: http://www.vtk.org/Wiki/VTK/Tutorials/QtSetup).
if '~webkit' in spec['qt']:
- cmake_args.extend([
- '-DVTK_Group_Qt:BOOL=OFF',
- '-DModule_vtkGUISupportQt:BOOL=ON',
- '-DModule_vtkGUISupportQtOpenGL:BOOL=ON',
- ])
+ if spec.satisfies('@:8'):
+ cmake_args.extend([
+ '-DVTK_Group_Qt:BOOL=OFF',
+ '-DModule_vtkGUISupportQt:BOOL=ON',
+ '-DModule_vtkGUISupportQtOpenGL:BOOL=ON',
+ ])
+ else:
+ cmake_args.extend([
+ '-DVTK_GROUP_ENABLE_Qt:STRING=NO',
+ '-DVTK_MODULE_ENABLE_VTK_GUISupportQt:STRING=YES',
+ ])
if '+xdmf' in spec:
if spec.satisfies('^cmake@3.12:'):
# This policy exists only for CMake >= 3.12
cmake_args.extend(["-DCMAKE_POLICY_DEFAULT_CMP0074=NEW"])
- cmake_args.extend([
- # Enable XDMF Support here
- "-DModule_vtkIOXdmf2:BOOL=ON",
- "-DModule_vtkIOXdmf3:BOOL=ON",
- "-DBOOST_ROOT={0}".format(spec['boost'].prefix),
- "-DBOOST_LIBRARY_DIR={0}".format(spec['boost'].prefix.lib),
- "-DBOOST_INCLUDE_DIR={0}".format(spec['boost'].prefix.include),
- "-DBOOST_NO_SYSTEM_PATHS:BOOL=ON",
- # This is needed because VTK has multiple FindBoost
- # and they stick to system boost if there's a system boost
- # installed with CMake
- "-DBoost_NO_BOOST_CMAKE:BOOL=ON",
- "-DHDF5_ROOT={0}".format(spec['hdf5'].prefix),
- # The xdmf project does not export any CMake file...
- "-DVTK_USE_SYSTEM_XDMF3:BOOL=OFF",
- "-DVTK_USE_SYSTEM_XDMF2:BOOL=OFF"
- ])
+ if spec.satisfies('@:8'):
+ cmake_args.extend([
+ # Enable XDMF Support here
+ "-DModule_vtkIOXdmf2:BOOL=ON",
+ "-DModule_vtkIOXdmf3:BOOL=ON",
+ "-DBOOST_ROOT={0}".format(spec['boost'].prefix),
+ "-DBOOST_LIBRARY_DIR={0}".format(spec['boost'].prefix.lib),
+ "-DBOOST_INCLUDE_DIR={0}".format(spec['boost'].prefix.include),
+ "-DBOOST_NO_SYSTEM_PATHS:BOOL=ON",
+ # This is needed because VTK has multiple FindBoost
+ # and they stick to system boost if there's a system boost
+ # installed with CMake
+ "-DBoost_NO_BOOST_CMAKE:BOOL=ON",
+ # The xdmf project does not export any CMake file...
+ "-DVTK_USE_SYSTEM_XDMF3:BOOL=OFF",
+ "-DVTK_USE_SYSTEM_XDMF2:BOOL=OFF"
+ ])
+ else:
+ cmake_args.extend([
+ '-DVTK_MODULE_ENABLE_VTK_xdmf2:STRING=YES',
+ '-DVTK_MODULE_ENABLE_VTK_xdmf3:STRING=YES',
+ '-DVTK_MODULE_ENABLE_VTK_IOXdmf2:STRING=YES',
+ '-DVTK_MODULE_ENABLE_VTK_IOXdmf3:STRING=YES',
+ ])
if '+mpi' in spec:
- cmake_args.extend(["-DModule_vtkIOParallelXdmf3:BOOL=ON"])
+ if spec.satisfies('@:8'):
+ cmake_args.append("-DModule_vtkIOParallelXdmf3:BOOL=ON")
+ else:
+ cmake_args.append(
+ '-DVTK_MODULE_ENABLE_VTK_IOParallelXdmf3:STRING=YES')
cmake_args.append('-DVTK_RENDERING_BACKEND:STRING=' + opengl_ver)
diff --git a/var/spack/repos/builtin/packages/vvtest/package.py b/var/spack/repos/builtin/packages/vvtest/package.py
index 3b98602f23..3b98602f23 100755..100644
--- a/var/spack/repos/builtin/packages/vvtest/package.py
+++ b/var/spack/repos/builtin/packages/vvtest/package.py
diff --git a/var/spack/repos/builtin/packages/w3emc/package.py b/var/spack/repos/builtin/packages/w3emc/package.py
index 152d26e634..488d0e9ed3 100644
--- a/var/spack/repos/builtin/packages/w3emc/package.py
+++ b/var/spack/repos/builtin/packages/w3emc/package.py
@@ -28,3 +28,10 @@ class W3emc(CMakePackage):
depends_on('nemsio', when='@2.7.3')
depends_on('sigio', when='@2.7.3')
depends_on('netcdf-fortran', when='@2.7.3')
+
+ def setup_run_environment(self, env):
+ for suffix in ('4', '8', 'd'):
+ lib = find_libraries('libw3emc_' + suffix, root=self.prefix,
+ shared=False, recursive=True)
+ env.set('W3EMC_LIB' + suffix, lib[0])
+ env.set('W3EMC_INC' + suffix, join_path(self.prefix, 'include_' + suffix))
diff --git a/var/spack/repos/builtin/packages/warpx/2626.patch b/var/spack/repos/builtin/packages/warpx/2626.patch
new file mode 100644
index 0000000000..699744b89d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/warpx/2626.patch
@@ -0,0 +1,34 @@
+From 9785e706229622626133c4b03c7abd004f62023f Mon Sep 17 00:00:00 2001
+From: Axel Huebl <axel.huebl@plasma.ninja>
+Date: Sat, 4 Dec 2021 15:28:13 -0800
+Subject: [PATCH] Fix: Installed Symlink LIB
+
+The latest patch to these routines broke our library alias in installs.
+
+By default, this variable is relative and needs the prefix appended.
+In some cases, e.g., if externally set, it can already be absolute. In that
+case, we skip adding the prefix.
+---
+ CMakeLists.txt | 7 ++++++-
+ 1 file changed, 6 insertions(+), 1 deletion(-)
+
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index 04092ba962..a549546ab9 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -343,9 +343,14 @@ if(WarpX_LIB)
+ else()
+ set(mod_ext "so")
+ endif()
++ if(IS_ABSOLUTE ${CMAKE_INSTALL_LIBDIR})
++ set(ABS_INSTALL_LIB_DIR ${CMAKE_INSTALL_LIBDIR})
++ else()
++ set(ABS_INSTALL_LIB_DIR ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR})
++ endif()
+ install(CODE "file(CREATE_LINK
+ $<TARGET_FILE_NAME:shared>
+- ${CMAKE_INSTALL_LIBDIR}/libwarpx.${lib_suffix}.${mod_ext}
++ ${ABS_INSTALL_LIB_DIR}/libwarpx.${lib_suffix}.${mod_ext}
+ COPY_ON_ERROR SYMBOLIC)")
+ endif()
+
diff --git a/var/spack/repos/builtin/packages/warpx/package.py b/var/spack/repos/builtin/packages/warpx/package.py
index 8306115128..2e67026e07 100644
--- a/var/spack/repos/builtin/packages/warpx/package.py
+++ b/var/spack/repos/builtin/packages/warpx/package.py
@@ -17,7 +17,7 @@ class Warpx(CMakePackage):
"""
homepage = "https://ecp-warpx.github.io"
- url = "https://github.com/ECP-WarpX/WarpX/archive/refs/tags/21.07.tar.gz"
+ url = "https://github.com/ECP-WarpX/WarpX/archive/refs/tags/21.12.tar.gz"
git = "https://github.com/ECP-WarpX/WarpX.git"
maintainers = ['ax3l', 'dpgrote', 'MaxThevenet', 'RemiLehe']
@@ -25,6 +25,8 @@ class Warpx(CMakePackage):
# NOTE: if you update the versions here, also see py-warpx
version('develop', branch='development')
+ # 22.01+ requires C++17 or newer
+ version('21.12', sha256='847c98aac20c73d94c823378803c82be9a14139f1c14ea483757229b452ce4c1')
version('21.11', sha256='ce60377771c732033a77351cd3500b24b5d14b54a5adc7a622767b9251c10d0b')
version('21.10', sha256='d372c573f0360094d5982d64eceeb0149d6620eb75e8fdbfdc6777f3328fb454')
version('21.09', sha256='861a65f11846541c803564db133c8678b9e8779e69902ef1637b21399d257eab')
@@ -33,6 +35,7 @@ class Warpx(CMakePackage):
version('21.06', sha256='a26039dc4061da45e779dd5002467c67a533fc08d30841e01e7abb3a890fbe30')
version('21.05', sha256='f835f0ae6c5702550d23191aa0bb0722f981abb1460410e3d8952bc3d945a9fc')
version('21.04', sha256='51d2d8b4542eada96216e8b128c0545c4b7527addc2038efebe586c32c4020a0')
+ # 20.01+ requires C++14 or newer
variant('app', default=True,
description='Build the WarpX executable application')
@@ -45,7 +48,7 @@ class Warpx(CMakePackage):
description='On-node, accelerated computing backend')
variant('dims',
default='3',
- values=('2', '3', 'rz'),
+ values=('1', '2', '3', 'rz'),
multi=False,
description='Number of spatial dimensions')
variant('eb', default=False,
@@ -104,12 +107,19 @@ class Warpx(CMakePackage):
depends_on('rocprim')
depends_on('rocrand')
+ conflicts('dims=1', when='@:21.12',
+ msg='WarpX 1D support starts in 22.01+')
conflicts('~qed +qedtablegen',
msg='WarpX PICSAR QED table generation needs +qed')
conflicts('compute=sycl', when='+psatd',
msg='WarpX spectral solvers are not yet tested with SYCL '
'(use "warpx ~psatd")')
+ # The symbolic aliases for our +lib target were missing in the install
+ # location
+ # https://github.com/ECP-WarpX/WarpX/pull/2626
+ patch('2626.patch', when='@21.12')
+
def cmake_args(self):
spec = self.spec
@@ -135,11 +145,14 @@ class Warpx(CMakePackage):
self.define_from_variant('WarpX_QED_TABLE_GEN', 'qedtablegen'),
]
+ with when('+openpmd'):
+ args.append('-DWarpX_openpmd_internal=OFF')
+
return args
@property
def libs(self):
- libsuffix = {'2': '2d', '3': '3d', 'rz': 'rz'}
+ libsuffix = {'1': '1d', '2': '2d', '3': '3d', 'rz': 'rz'}
dims = self.spec.variants['dims'].value
return find_libraries(
['libwarpx.' + libsuffix[dims]], root=self.prefix, recursive=True,
@@ -155,7 +168,8 @@ class Warpx(CMakePackage):
self.install_test_root if post_install else self.stage.source_path,
self.examples_src_dir)
dims = self.spec.variants['dims'].value
- inputs_nD = {'2': 'inputs_2d', '3': 'inputs_3d', 'rz': 'inputs_2d_rz'}
+ inputs_nD = {'1': 'inputs_1d', '2': 'inputs_2d', '3': 'inputs_3d',
+ 'rz': 'inputs_2d_rz'}
inputs = join_path(examples_dir, inputs_nD[dims])
cli_args = [inputs, "max_step=50", "diag1.intervals=10"]
diff --git a/var/spack/repos/builtin/packages/whizard/package.py b/var/spack/repos/builtin/packages/whizard/package.py
index 883a18c983..f4ef68b674 100644
--- a/var/spack/repos/builtin/packages/whizard/package.py
+++ b/var/spack/repos/builtin/packages/whizard/package.py
@@ -58,6 +58,7 @@ class Whizard(AutotoolsPackage):
variant('latex', default=False,
description="data visualization with latex")
+ depends_on('libtirpc')
depends_on('ocaml@4.02.3:', type='build', when="@3:")
depends_on('ocaml@4.02.3:~force-safe-string', type='build', when="@:2")
depends_on('hepmc', when="hepmc=2")
diff --git a/var/spack/repos/builtin/packages/wi4mpi/package.py b/var/spack/repos/builtin/packages/wi4mpi/package.py
new file mode 100644
index 0000000000..93d81cdaeb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/wi4mpi/package.py
@@ -0,0 +1,55 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Wi4mpi(CMakePackage):
+    """WI4MPI: Wrapper Interface For MPI, performing a light translation between
+    MPI constants and MPI objects from one MPI implementation to another"""
+
+ homepage = "https://github.com/cea-hpc/wi4mpi"
+ url = "https://github.com/cea-hpc/wi4mpi/archive/v3.4.1.tar.gz"
+ maintainers = ['adrien-cotte', 'marcjoos-cea']
+
+ version('3.4.1', sha256='92bf6738216426069bc07bff19cd7c933e33e397a941ff9f89a639380fab3737')
+ version('3.3.0', sha256='fb7fb3b591144e90b3d688cf844c2246eb185f54e1da6baef857e035ef730d96')
+ version('3.2.2', sha256='23ac69740577d66a68ddd5360670f0a344e3c47a5d146033c63a67e54e56c66f')
+ version('3.2.1', sha256='0d928cb930b6cb1ae648eca241db59812ee0e5c041faf2f57728bbb6ee4e36df')
+ version('3.2.0', sha256='3322f6823dbec1d58a1fcf163b2bcdd7b9cd75dc6c7f78865fc6cb0a91bf6f94')
+ variant('build_type', default='Release',
+ description='The build type to build',
+ values=('Debug', 'Release', 'RelWithDebInfo'))
+
+ depends_on('mpi')
+
+ def cmake_args(self):
+ if '%gcc' in self.spec:
+ compiler = "GNU"
+ elif '%intel' in self.spec:
+ compiler = "INTEL"
+ elif '%clang' in self.spec:
+ compiler = "LLVM"
+ elif '%pgi' in self.spec:
+ compiler = "PGI"
+ else:
+ tty.error("Could not determine compiler used")
+ wi4mpi_build_type = 'RELEASE'
+ if self.spec.variants["build_type"].value == "RelWithDebInfo":
+ wi4mpi_build_type = 'NORMAL'
+ elif self.spec.variants["build_type"].value == "Debug":
+ wi4mpi_build_type = 'DEBUG'
+ args = [
+ self.define('WI4MPI_REALEASE', wi4mpi_build_type),
+ self.define('WI4MPI_COMPILER', compiler)
+ ]
+ return args
+
+ def setup_run_environment(self, env):
+ env.set('WI4MPI_ROOT', self.prefix)
+ env.set('WI4MPI_VERSION', self.version)
+ env.set('WI4MPI_CC', self.compiler.cc)
+ env.set('WI4MPI_CXX', self.compiler.cxx)
+ env.set('WI4MPI_FC', self.compiler.fc)
diff --git a/var/spack/repos/builtin/packages/wiredtiger/package.py b/var/spack/repos/builtin/packages/wiredtiger/package.py
new file mode 100644
index 0000000000..a613d9dc46
--- /dev/null
+++ b/var/spack/repos/builtin/packages/wiredtiger/package.py
@@ -0,0 +1,39 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Wiredtiger(AutotoolsPackage):
+    """WiredTiger is a high-performance, scalable, production-quality,
+    NoSQL, Open Source extensible platform for data management."""
+
+ homepage = "https://source.wiredtiger.com/"
+ url = "https://github.com/wiredtiger/wiredtiger/releases/download/10.0.0/wiredtiger-10.0.0.tar.bz2"
+
+ version('10.0.0', sha256='4830107ac744c0459ef99697652aa3e655c2122005a469a49d221e692fb834a5')
+
+ depends_on('python@3:', type=('build', 'run'), when='+python')
+ depends_on('swig', type=('build', 'run'), when='+python')
+ depends_on('lz4', when='+lz4')
+ depends_on('snappy', when='+snappy')
+ depends_on('zlib', when='+zlib')
+ depends_on('zstd', when='+zstd')
+ depends_on('rsync', type='build')
+
+ variant('python', default=False, description='Compile Python API')
+ variant('lz4', default=False, description='Build the lz4 compressor extension')
+ variant('snappy', default=False, description='Build the snappy compressor extension')
+ variant('zlib', default=False, description='Build the zlib compressor extension')
+ variant('zstd', default=False, description='Build the zstd compressor extension')
+
+ def configure_args(self):
+ args = []
+ args += self.enable_or_disable('python')
+ args += self.enable_or_disable('lz4')
+ args += self.enable_or_disable('snappy')
+ args += self.enable_or_disable('zlib')
+ args += self.enable_or_disable('zstd')
+ return args
diff --git a/var/spack/repos/builtin/packages/wonton/package.py b/var/spack/repos/builtin/packages/wonton/package.py
index 9dd5492488..29503a9a10 100644
--- a/var/spack/repos/builtin/packages/wonton/package.py
+++ b/var/spack/repos/builtin/packages/wonton/package.py
@@ -33,7 +33,7 @@ class Wonton(CMakePackage):
variant('kokkos', default=False, description='Enable on-node or device parallelism with Kokkos')
variant('openmp', default=False, description="Enable on-node parallelism using OpenMP")
variant('cuda', default=False, description="Enable GPU parallelism using CUDA")
-
+ variant('flecsi', default=False, description="Enable FlecSI")
# wrappers to external mesh/state libraries
variant('jali', default=False, description='Enable Jali mesh wrappers')
@@ -47,6 +47,7 @@ class Wonton(CMakePackage):
depends_on('netlib-lapack +lapacke', when='+lapacke')
depends_on('mpi', when='+mpi')
+ depends_on('flecsi', when='+flecsi')
depends_on('jali +mstk', when='+jali')
depends_on('mpi', when='+jali')
@@ -113,10 +114,8 @@ class Wonton(CMakePackage):
else:
options.append('-DWONTON_ENABLE_Jali=OFF')
- if '+flecsi' in self.spec:
- options.append('-DWONTON_ENABLE_FleCSI=ON')
- else:
- options.append('-DWONTON_ENABLE_FleCSI=OFF')
+        # FIXME: the flecsi dependency is currently broken; revisit this option
+ options.append(self.define_from_variant('WONTON_ENABLE_FleCSI', 'flecsi'))
# Unit test variant
if self.run_tests:
@@ -127,3 +126,8 @@ class Wonton(CMakePackage):
options.append('-DENABLE_APP_TESTS=OFF')
return options
+
+ def check(self):
+ if self.run_tests:
+ with working_dir(self.build_directory):
+ make("test")
diff --git a/var/spack/repos/builtin/packages/xrootd/no-systemd.patch b/var/spack/repos/builtin/packages/xrootd/no-systemd.patch
new file mode 100644
index 0000000000..44099f0b02
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xrootd/no-systemd.patch
@@ -0,0 +1,17 @@
+--- a/cmake/XRootDFindLibs.cmake 2021-07-29 12:22:48.000000000 +0000
++++ b/cmake/XRootDFindLibs.cmake 2021-10-25 18:26:07.308918231 +0000
+@@ -26,10 +26,10 @@
+ add_definitions( -DHAVE_XML2 )
+ endif()
+
+-find_package( Systemd )
+-if( SYSTEMD_FOUND )
+- add_definitions( -DHAVE_SYSTEMD )
+-endif()
++#find_package( Systemd )
++#if( SYSTEMD_FOUND )
++# add_definitions( -DHAVE_SYSTEMD )
++#endif()
+
+ find_package( CURL )
+
diff --git a/var/spack/repos/builtin/packages/xrootd/package.py b/var/spack/repos/builtin/packages/xrootd/package.py
index 3b75fb86db..73ce5d99c0 100644
--- a/var/spack/repos/builtin/packages/xrootd/package.py
+++ b/var/spack/repos/builtin/packages/xrootd/package.py
@@ -14,6 +14,7 @@ class Xrootd(CMakePackage):
url = "http://xrootd.org/download/v5.3.1/xrootd-5.3.1.tar.gz"
list_url = 'https://xrootd.slac.stanford.edu/dload.html'
+ version('5.3.2', sha256='e8371fb9e86769bece74b9b9d67cb695023cd6a20a1199386fddd9ed840b0875')
version('5.3.1', sha256='7ea3a112ae9d8915eb3a06616141e5a0ee366ce9a5e4d92407b846b37704ee98')
version('5.1.0', sha256='c639536f1bdc5b6b365e807f3337ed2d41012cd3df608d40e91ed05f1c568b6d')
version('5.0.3', sha256='be40a1897d6c1f153d3e23c39fe96e45063bfafc3cc073db88a1a9531db79ac5')
@@ -50,6 +51,9 @@ class Xrootd(CMakePackage):
variant('readline', default=True,
description='Use readline')
+ variant('krb5', default=False,
+ description='Build with KRB5 support')
+
variant('cxxstd',
default='11',
values=('98', '11', '14', '17'),
@@ -67,10 +71,16 @@ class Xrootd(CMakePackage):
depends_on('readline', when='+readline')
depends_on('xz')
depends_on('zlib')
+ depends_on('curl')
+ depends_on('krb5', when='+krb5')
+ depends_on('json-c')
extends('python', when='+python')
patch('python-support.patch', level=1, when='@:4.8+python')
+ # do not use systemd
+ patch('no-systemd.patch')
+
def patch(self):
"""Remove hardcoded -std=c++0x flag
"""
@@ -86,6 +96,8 @@ class Xrootd(CMakePackage):
format('ON' if '+python' in spec else 'OFF'),
'-DENABLE_READLINE:BOOL={0}'.
format('ON' if '+readline' in spec else 'OFF'),
+ '-DENABLE_KRB5:BOOL={0}'.
+ format('ON' if '+krb5' in spec else 'OFF'),
'-DENABLE_CEPH:BOOL=OFF'
]
# see https://github.com/spack/spack/pull/11581
diff --git a/var/spack/repos/builtin/packages/xsdk/package.py b/var/spack/repos/builtin/packages/xsdk/package.py
index 0ee4c05161..b6f496867e 100644
--- a/var/spack/repos/builtin/packages/xsdk/package.py
+++ b/var/spack/repos/builtin/packages/xsdk/package.py
@@ -5,28 +5,92 @@
import sys
+from copy import deepcopy
from spack import *
-class Xsdk(BundlePackage):
+def xsdk_depends_on_accl(accl_name, accl_var, *args, **kwargs):
+ if accl_name == 'cuda':
+ accl_arch_name = 'cuda_arch'
+ accl_arch_values = list(deepcopy(CudaPackage.cuda_arch_values))
+ elif accl_name == 'rocm':
+ accl_arch_name = 'amdgpu_target'
+ accl_arch_values = list(deepcopy(ROCmPackage.amdgpu_targets))
+ # require ~cuda when xsdk~cuda (and '?cuda' not used)
+ usedep = 1
+ args_new = list(deepcopy(args))
+ if not isinstance(accl_var, list):
+ accl_var = [accl_var]
+ for idx, var in enumerate(accl_var):
+ # skip variants starting with '?' so that
+ # that that they are left unspecified by xsdk
+ if not var.startswith('?'):
+ args_new[0] += ' ~%s' % var
+ else:
+ accl_var[idx] = var.replace('?', '')
+ # if '?cuda' skip adding '~cuda' dep
+ if var == '?' + accl_name:
+ usedep = 0
+ kwargs_new = deepcopy(kwargs)
+ if 'when' in kwargs_new:
+ kwargs_new['when'] += ' ~' + accl_name
+ else:
+ kwargs_new['when'] = '~' + accl_name
+ if usedep:
+ depends_on(*args_new, **kwargs_new)
+
+ # require +cuda when xsdk+cuda, and match the arch
+ for arch in accl_arch_values:
+ args_new = list(deepcopy(args))
+ kwargs_new = deepcopy(kwargs)
+ args_new[0] += '+%s %s=%s' % ('+'.join(accl_var), accl_arch_name, str(arch))
+ if 'when' in kwargs_new:
+ kwargs_new['when'] += ' +%s %s=%s' % (accl_name, accl_arch_name, str(arch))
+ else:
+ kwargs_new['when'] = '+%s %s=%s' % (accl_name, accl_arch_name, str(arch))
+ depends_on(*args_new, **kwargs_new)
+
+
+def xsdk_depends_on(spec, cuda_var='', rocm_var='', **kwargs):
+ """
+ Wrapper for depends_on which can handle propagating cuda and rocm
+ variants.
+
+ Currently, it propagates +cuda_var when xsdk+cuda and rocm_var
+ when xsdk+rocm. When xsdk~[cuda|rocm], then ~[cuda|rocm]_var is
+ selected unless the variant string is prefixed with a '?'
+ (see the tasmanian use below). When '?' prefix is used, then
+ the variant is left unspecified.
+
+ [cuda|rocm]_var can be an array of variant strings or just a single
+ variant string. The spack '+' and '~' symbols should not appear
+ in the strings.
+ """
+ if bool(cuda_var):
+ xsdk_depends_on_accl('cuda', cuda_var, spec, **kwargs)
+ if bool(rocm_var):
+ xsdk_depends_on_accl('rocm', rocm_var, spec, **kwargs)
+ if not bool(cuda_var) and not bool(rocm_var):
+ depends_on(spec, **kwargs)
+
+
+class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
"""Xsdk is a suite of Department of Energy (DOE) packages for numerical
simulation. This is a Spack bundle package that installs the xSDK
packages
"""
homepage = "https://xsdk.info"
-
- maintainers = ['balay', 'luszczek']
+ maintainers = ['balay', 'luszczek', 'balos1']
version('develop')
+ version('0.7.0')
version('0.6.0')
- version('0.5.0')
+ version('0.5.0', deprecated=True)
version('0.4.0', deprecated=True)
version('0.3.0', deprecated=True)
- variant('debug', default=False, description='Compile in debug mode')
- variant('cuda', default=False, description='Enable CUDA dependent packages')
variant('trilinos', default=True, description='Enable trilinos package build')
variant('datatransferkit', default=True, description='Enable datatransferkit package build')
variant('omega-h', default=True, description='Enable omega-h package build')
@@ -43,133 +107,196 @@ class Xsdk(BundlePackage):
variant('slate', default=True, description='Enable slate package build')
variant('arborx', default=True, description='Enable ArborX build')
- depends_on('hypre@develop+superlu-dist+shared', when='@develop')
- depends_on('hypre@2.20.0+superlu-dist+shared', when='@0.6.0')
- depends_on('hypre@2.18.2+superlu-dist+shared', when='@0.5.0')
- depends_on('hypre@2.15.1~internal-superlu', when='@0.4.0')
- depends_on('hypre@2.12.1~internal-superlu', when='@0.3.0')
-
- depends_on('mfem@develop+mpi+superlu-dist+petsc+sundials+examples+miniapps', when='@develop')
- depends_on('mfem@4.2.0+mpi+superlu-dist+petsc+sundials+examples+miniapps', when='@0.6.0')
- depends_on('mfem@4.0.1-xsdk+mpi~superlu-dist+petsc+sundials+examples+miniapps', when='@0.5.0')
- depends_on('mfem@3.4.0+mpi+superlu-dist+petsc+sundials+examples+miniapps', when='@0.4.0')
- depends_on('mfem@3.3.2+mpi+superlu-dist+petsc+sundials+examples+miniapps', when='@0.3.0')
-
- depends_on('superlu-dist@develop', when='@develop')
- depends_on('superlu-dist@6.4.0', when='@0.6.0')
- depends_on('superlu-dist@6.1.1', when='@0.5.0')
- depends_on('superlu-dist@6.1.0', when='@0.4.0')
- depends_on('superlu-dist@5.2.2', when='@0.3.0')
-
- depends_on('trilinos@develop+hypre+superlu-dist+hdf5~mumps+boost~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2~exodus~dtk+intrepid2+shards+stratimikos gotype=int cxxstd=14',
- when='@develop +trilinos')
- depends_on('trilinos@13.0.1+hypre+superlu-dist+hdf5~mumps+boost~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2~exodus~dtk+intrepid2+shards gotype=int',
- when='@0.6.0 +trilinos')
- depends_on('trilinos@12.18.1+hypre+superlu-dist+hdf5~mumps+boost~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2~exodus+dtk+intrepid2+shards',
- when='@0.5.0 +trilinos')
- depends_on('trilinos@12.14.1+hypre+superlu-dist+hdf5~mumps+boost~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2~exodus+dtk+intrepid2+shards',
- when='@0.4.0 +trilinos')
- depends_on('trilinos@12.12.1+hypre+superlu-dist+hdf5~mumps+boost~suite-sparse~tpetra~ifpack2~zoltan~zoltan2~amesos2~exodus',
- when='@0.3.0 +trilinos')
-
- depends_on('datatransferkit@master', when='@develop +trilinos +datatransferkit')
- depends_on('datatransferkit@3.1-rc2', when='@0.6.0 +trilinos +datatransferkit')
-
- depends_on('petsc +trilinos', when='+trilinos')
- depends_on('petsc +cuda', when='+cuda @0.6.0:')
- depends_on('petsc +batch', when='platform=cray @0.5.0:')
- depends_on('petsc@main+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
- when='@develop')
- depends_on('petsc@3.14.1+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
- when='@0.6.0')
- depends_on('petsc@3.12.1+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
- when='@0.5.0')
- depends_on('petsc@3.10.3+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
- when='@0.4.0')
- depends_on('petsc@3.8.2+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
- when='@0.3.0')
-
- depends_on('dealii +trilinos~adol-c', when='+trilinos +dealii')
- depends_on('dealii ~trilinos', when='~trilinos +dealii')
- depends_on('dealii@master~assimp~python~doc~gmsh+petsc+slepc+mpi~int64+hdf5~netcdf+metis~sundials~ginkgo~symengine~nanoflann', when='@develop +dealii')
- depends_on('dealii@9.2.0~assimp~python~doc~gmsh+petsc+slepc+mpi~int64+hdf5~netcdf+metis~sundials~ginkgo~symengine~simplex~arborx', when='@0.6.0 +dealii')
- depends_on('dealii@9.1.1~assimp~python~doc~gmsh+petsc+slepc+mpi~int64+hdf5~netcdf+metis~sundials~ginkgo~symengine', when='@0.5.0 +dealii')
- depends_on('dealii@9.0.1~assimp~python~doc~gmsh+petsc~slepc+mpi~int64+hdf5~netcdf+metis~ginkgo~symengine', when='@0.4.0 +dealii')
-
- depends_on('pflotran@develop', when='@develop')
- depends_on('pflotran@xsdk-0.6.0', when='@0.6.0')
- depends_on('pflotran@xsdk-0.5.0', when='@0.5.0')
- depends_on('pflotran@xsdk-0.4.0', when='@0.4.0')
- depends_on('pflotran@xsdk-0.3.0', when='@0.3.0')
-
- depends_on('alquimia@develop', when='@develop +alquimia')
- depends_on('alquimia@xsdk-0.6.0', when='@0.6.0 +alquimia')
- depends_on('alquimia@xsdk-0.5.0', when='@0.5.0 +alquimia ')
- depends_on('alquimia@xsdk-0.4.0', when='@0.4.0 +alquimia')
- depends_on('alquimia@xsdk-0.3.0', when='@0.3.0 +alquimia')
-
- depends_on('sundials +cuda', when='+cuda @0.6.0:')
- depends_on('sundials +trilinos', when='+trilinos @0.6.0:')
- depends_on('sundials@develop~int64+hypre+petsc+superlu-dist', when='@develop')
- depends_on('sundials@5.5.0~int64+hypre+petsc+superlu-dist', when='@0.6.0')
- depends_on('sundials@5.0.0~int64+hypre+petsc+superlu-dist', when='@0.5.0')
- depends_on('sundials@3.2.1~int64+hypre', when='@0.4.0')
- depends_on('sundials@3.1.0~int64+hypre', when='@0.3.0')
-
- depends_on('plasma@20.9.20:', when='@develop %gcc@6.0:')
- depends_on('plasma@20.9.20:', when='@0.6.0 %gcc@6.0:')
- depends_on('plasma@19.8.1:', when='@0.5.0 %gcc@6.0:')
- depends_on('plasma@18.11.1:', when='@0.4.0 %gcc@6.0:')
-
- depends_on('magma@2.5.4', when='@develop +cuda')
- depends_on('magma@2.5.4', when='@0.6.0 +cuda')
- depends_on('magma@2.5.1', when='@0.5.0 +cuda')
- depends_on('magma@2.4.0', when='@0.4.0 +cuda')
- depends_on('magma@2.2.0', when='@0.3.0 +cuda')
-
- depends_on('amrex@develop', when='@develop %intel')
- depends_on('amrex@develop', when='@develop %gcc')
- depends_on('amrex@20.10', when='@0.6.0 %intel')
- depends_on('amrex@20.10', when='@0.6.0 %gcc')
- depends_on('amrex@19.08', when='@0.5.0 %intel')
- depends_on('amrex@19.08', when='@0.5.0 %gcc')
- depends_on('amrex@18.10.1', when='@0.4.0 %intel')
- depends_on('amrex@18.10.1', when='@0.4.0 %gcc')
-
- depends_on('slepc@main', when='@develop')
- depends_on('slepc@3.14.0', when='@0.6.0')
- depends_on('slepc@3.12.0', when='@0.5.0')
- depends_on('slepc@3.10.1', when='@0.4.0')
-
- depends_on('omega-h +trilinos', when='+trilinos +omega-h')
- depends_on('omega-h ~trilinos', when='~trilinos +omega-h')
- depends_on('omega-h@main', when='@develop +omega-h')
- depends_on('omega-h@9.32.5', when='@0.6.0 +omega-h')
- depends_on('omega-h@9.29.0', when='@0.5.0 +omega-h')
- depends_on('omega-h@9.19.1', when='@0.4.0 +omega-h')
-
- depends_on('strumpack ~cuda', when='~cuda @0.6.0: +strumpack')
- depends_on('strumpack@master~slate~openmp', when='@develop +strumpack')
- depends_on('strumpack@5.0.0~slate~openmp', when='@0.6.0 +strumpack')
- depends_on('strumpack@3.3.0~slate~openmp', when='@0.5.0 +strumpack')
- depends_on('strumpack@3.1.1~slate~openmp', when='@0.4.0 +strumpack')
-
- depends_on('pumi@master', when='@develop')
- depends_on('pumi@2.2.5', when='@0.6.0')
- depends_on('pumi@2.2.1', when='@0.5.0')
- depends_on('pumi@2.2.0', when='@0.4.0')
+ xsdk_depends_on('hypre@develop+superlu-dist+shared', when='@develop',
+ cuda_var='cuda')
+ xsdk_depends_on('hypre@2.23.0+superlu-dist+shared', when='@0.7.0',
+ cuda_var='cuda')
+ xsdk_depends_on('hypre@2.20.0+superlu-dist+shared', when='@0.6.0')
+ xsdk_depends_on('hypre@2.18.2+superlu-dist+shared', when='@0.5.0')
+ xsdk_depends_on('hypre@2.15.1~internal-superlu', when='@0.4.0')
+ xsdk_depends_on('hypre@2.12.1~internal-superlu', when='@0.3.0')
+
+ xsdk_depends_on('mfem@develop+mpi+superlu-dist+petsc+sundials+examples+miniapps',
+ when='@develop', cuda_var='cuda', rocm_var='rocm')
+ xsdk_depends_on('mfem@4.3.0+mpi+superlu-dist+petsc+sundials+examples+miniapps',
+ when='@0.7.0', cuda_var='cuda', rocm_var='rocm')
+ xsdk_depends_on('mfem@4.2.0+mpi+superlu-dist+petsc+sundials+examples+miniapps',
+ when='@0.6.0', cuda_var='cuda')
+ xsdk_depends_on('mfem@4.0.1-xsdk+mpi~superlu-dist+petsc+sundials+examples+miniapps',
+ when='@0.5.0')
+ xsdk_depends_on('mfem@3.4.0+mpi+superlu-dist+petsc+sundials+examples+miniapps',
+ when='@0.4.0')
+ xsdk_depends_on('mfem@3.3.2+mpi+superlu-dist+petsc+sundials+examples+miniapps',
+ when='@0.3.0')
+
+ xsdk_depends_on('superlu-dist@develop', when='@develop')
+ xsdk_depends_on('superlu-dist@7.1.1', when='@0.7.0')
+ xsdk_depends_on('superlu-dist@6.4.0', when='@0.6.0')
+ xsdk_depends_on('superlu-dist@6.1.1', when='@0.5.0')
+ xsdk_depends_on('superlu-dist@6.1.0', when='@0.4.0')
+ xsdk_depends_on('superlu-dist@5.2.2', when='@0.3.0')
+ xsdk_depends_on('trilinos@develop+hypre+superlu-dist+hdf5~mumps+boost' +
+ '~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2' +
+ '~exodus~dtk+intrepid2+shards+stratimikos gotype=int' +
+ ' cxxstd=14', when='@develop +trilinos')
+ xsdk_depends_on('trilinos@13.2.0+hypre+superlu-dist+hdf5~mumps+boost' +
+ '~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2' +
+ '~exodus~dtk+intrepid2+shards+stratimikos gotype=int' +
+ ' cxxstd=14', when='@0.7.0 +trilinos')
+ xsdk_depends_on('trilinos@13.0.1+hypre+superlu-dist+hdf5~mumps+boost' +
+ '~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2' +
+ '~exodus~dtk+intrepid2+shards gotype=int' +
+ ' cxxstd=11', when='@0.6.0 +trilinos')
+ xsdk_depends_on('trilinos@12.18.1+hypre+superlu-dist+hdf5~mumps+boost' +
+ '~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2' +
+ '~exodus+dtk+intrepid2+shards', when='@0.5.0 +trilinos')
+ xsdk_depends_on('trilinos@12.14.1+hypre+superlu-dist+hdf5~mumps+boost' +
+ '~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2' +
+ '~exodus+dtk+intrepid2+shards', when='@0.4.0 +trilinos')
+ xsdk_depends_on('trilinos@12.12.1+hypre+superlu-dist+hdf5~mumps+boost' +
+ '~suite-sparse~tpetra~ifpack2~zoltan~zoltan2~amesos2'
+ '~exodus', when='@0.3.0 +trilinos')
+
+ xsdk_depends_on('datatransferkit@master',
+ when='@develop +trilinos +datatransferkit')
+ dtk7ver = '3.1-rc2' if sys.platform == 'darwin' else '3.1-rc3'
+ xsdk_depends_on('datatransferkit@' + dtk7ver,
+ when='@0.7.0 +trilinos +datatransferkit')
+ xsdk_depends_on('datatransferkit@3.1-rc2',
+ when='@0.6.0 +trilinos +datatransferkit')
+
+ xsdk_depends_on('petsc +trilinos', when='+trilinos @:0.6.0')
+ xsdk_depends_on('petsc +batch', when='platform=cray @0.5.0:')
+ xsdk_depends_on('petsc@main+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
+ when='@develop', cuda_var='cuda')
+ xsdk_depends_on('petsc@3.16.1+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
+ when='@0.7.0', cuda_var='cuda')
+ xsdk_depends_on('petsc@3.14.1+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
+ when='@0.6.0', cuda_var='cuda')
+ xsdk_depends_on('petsc@3.12.1+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
+ when='@0.5.0')
+ xsdk_depends_on('petsc@3.10.3+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
+ when='@0.4.0')
+ xsdk_depends_on('petsc@3.8.2+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
+ when='@0.3.0')
+
+ xsdk_depends_on('dealii +trilinos~adol-c', when='+trilinos +dealii')
+ xsdk_depends_on('dealii ~trilinos', when='~trilinos +dealii')
+ xsdk_depends_on('dealii@master~assimp~python~doc~gmsh+petsc+slepc+mpi~int64+hdf5' +
+ '~netcdf+metis~sundials~ginkgo~symengine~nanoflann~simplex~arborx',
+ when='@develop +dealii')
+ xsdk_depends_on('dealii@9.3.2~assimp~python~doc~gmsh+petsc+slepc+mpi~int64+hdf5' +
+ '~netcdf+metis~sundials~ginkgo~symengine~simplex~arborx',
+ when='@0.7.0 +dealii')
+ xsdk_depends_on('dealii@9.2.0~assimp~python~doc~gmsh+petsc+slepc+mpi~int64+hdf5' +
+ '~netcdf+metis~sundials~ginkgo~symengine~simplex~arborx',
+ when='@0.6.0 +dealii')
+ xsdk_depends_on('dealii@9.1.1~assimp~python~doc~gmsh+petsc+slepc+mpi~int64+hdf5' +
+ '~netcdf+metis~sundials~ginkgo~symengine',
+ when='@0.5.0 +dealii')
+ xsdk_depends_on('dealii@9.0.1~assimp~python~doc~gmsh+petsc~slepc+mpi~int64+hdf5' +
+ '~netcdf+metis~ginkgo~symengine',
+ when='@0.4.0 +dealii')
+
+ xsdk_depends_on('pflotran@develop', when='@develop')
+ xsdk_depends_on('pflotran@3.0.2', when='@0.7.0')
+ xsdk_depends_on('pflotran@xsdk-0.6.0', when='@0.6.0')
+ xsdk_depends_on('pflotran@xsdk-0.5.0', when='@0.5.0')
+ xsdk_depends_on('pflotran@xsdk-0.4.0', when='@0.4.0')
+ xsdk_depends_on('pflotran@xsdk-0.3.0', when='@0.3.0')
+
+ xsdk_depends_on('alquimia@develop', when='@develop +alquimia')
+ xsdk_depends_on('alquimia@1.0.9', when='@0.7.0 +alquimia')
+ xsdk_depends_on('alquimia@xsdk-0.6.0', when='@0.6.0 +alquimia')
+ xsdk_depends_on('alquimia@xsdk-0.5.0', when='@0.5.0 +alquimia ')
+ xsdk_depends_on('alquimia@xsdk-0.4.0', when='@0.4.0 +alquimia')
+ xsdk_depends_on('alquimia@xsdk-0.3.0', when='@0.3.0 +alquimia')
+
+ xsdk_depends_on('sundials +trilinos', when='+trilinos @0.6.0:')
+ xsdk_depends_on('sundials@develop~int64+hypre+petsc+superlu-dist',
+ when='@develop', cuda_var='cuda', rocm_var='rocm')
+ xsdk_depends_on('sundials@5.8.0~int64+hypre+petsc+superlu-dist',
+ when='@0.7.0', cuda_var='cuda', rocm_var='rocm')
+ xsdk_depends_on('sundials@5.5.0~int64+hypre+petsc+superlu-dist',
+ when='@0.6.0', cuda_var='cuda')
+ xsdk_depends_on('sundials@5.0.0~int64+hypre+petsc+superlu-dist', when='@0.5.0')
+ xsdk_depends_on('sundials@3.2.1~int64+hypre', when='@0.4.0')
+ xsdk_depends_on('sundials@3.1.0~int64+hypre', when='@0.3.0')
+
+ xsdk_depends_on('plasma@develop:', when='@develop %gcc@6.0:')
+ xsdk_depends_on('plasma@21.8.29:', when='@0.7.0 %gcc@6.0:')
+ xsdk_depends_on('plasma@20.9.20:', when='@0.6.0 %gcc@6.0:')
+ xsdk_depends_on('plasma@19.8.1:', when='@0.5.0 %gcc@6.0:')
+ xsdk_depends_on('plasma@18.11.1:', when='@0.4.0 %gcc@6.0:')
+
+ xsdk_depends_on('magma@master', when='@develop', cuda_var='?cuda', rocm_var='?rocm')
+ xsdk_depends_on('magma@2.6.1', when='@0.7.0', cuda_var='?cuda', rocm_var='?rocm')
+ xsdk_depends_on('magma@2.5.4', when='@0.6.0', cuda_var='?cuda')
+ xsdk_depends_on('magma@2.5.1', when='@0.5.0', cuda_var='?cuda')
+ xsdk_depends_on('magma@2.4.0', when='@0.4.0', cuda_var='?cuda')
+ xsdk_depends_on('magma@2.2.0', when='@0.3.0', cuda_var='?cuda')
+
+ xsdk_depends_on('amrex@develop+sundials',
+ when='@develop %intel', cuda_var='cuda', rocm_var='rocm')
+ xsdk_depends_on('amrex@develop+sundials',
+ when='@develop %gcc', cuda_var='cuda', rocm_var='rocm')
+ xsdk_depends_on('amrex@develop+sundials',
+ when='@develop %cce', cuda_var='cuda', rocm_var='rocm')
+ xsdk_depends_on('amrex@21.10+sundials',
+ when='@0.7.0 %intel', cuda_var='cuda', rocm_var='rocm')
+ xsdk_depends_on('amrex@21.10+sundials',
+ when='@0.7.0 %gcc', cuda_var='cuda', rocm_var='rocm')
+ xsdk_depends_on('amrex@21.10+sundials',
+ when='@0.7.0 %cce', cuda_var='cuda', rocm_var='rocm')
+ xsdk_depends_on('amrex@20.10', when='@0.6.0 %intel')
+ xsdk_depends_on('amrex@20.10', when='@0.6.0 %gcc')
+ xsdk_depends_on('amrex@19.08', when='@0.5.0 %intel')
+ xsdk_depends_on('amrex@19.08', when='@0.5.0 %gcc')
+ xsdk_depends_on('amrex@18.10.1', when='@0.4.0 %intel')
+ xsdk_depends_on('amrex@18.10.1', when='@0.4.0 %gcc')
+
+ xsdk_depends_on('slepc@main', when='@develop')
+ xsdk_depends_on('slepc@3.16.0', when='@0.7.0')
+ xsdk_depends_on('slepc@3.14.0', when='@0.6.0')
+ xsdk_depends_on('slepc@3.12.0', when='@0.5.0')
+ xsdk_depends_on('slepc@3.10.1', when='@0.4.0')
+
+ xsdk_depends_on('omega-h +trilinos', when='+trilinos +omega-h')
+ xsdk_depends_on('omega-h ~trilinos', when='~trilinos +omega-h')
+ xsdk_depends_on('omega-h@main', when='@develop +omega-h')
+ xsdk_depends_on('omega-h@9.34.1', when='@0.7.0 +omega-h')
+ xsdk_depends_on('omega-h@9.32.5', when='@0.6.0 +omega-h')
+ xsdk_depends_on('omega-h@9.29.0', when='@0.5.0 +omega-h')
+ xsdk_depends_on('omega-h@9.19.1', when='@0.4.0 +omega-h')
+
+ xsdk_depends_on('strumpack ~cuda', when='~cuda @0.6.0: +strumpack')
+ xsdk_depends_on('strumpack@master~slate~openmp', when='@develop +strumpack')
+ xsdk_depends_on('strumpack@6.1.0~slate~openmp', when='@0.7.0 +strumpack')
+ xsdk_depends_on('strumpack@5.0.0~slate~openmp', when='@0.6.0 +strumpack')
+ xsdk_depends_on('strumpack@3.3.0~slate~openmp', when='@0.5.0 +strumpack')
+ xsdk_depends_on('strumpack@3.1.1~slate~openmp', when='@0.4.0 +strumpack')
+
+ xsdk_depends_on('pumi@master', when='@develop')
+ xsdk_depends_on('pumi@2.2.6', when='@0.7.0')
+ xsdk_depends_on('pumi@2.2.5', when='@0.6.0')
+ xsdk_depends_on('pumi@2.2.1', when='@0.5.0')
+ xsdk_depends_on('pumi@2.2.0', when='@0.4.0')
tasmanian_openmp = '~openmp' if sys.platform == 'darwin' else '+openmp'
- depends_on('tasmanian@develop+xsdkflags+blas' + tasmanian_openmp, when='@develop')
- depends_on('tasmanian@develop+xsdkflags+blas+cuda+magma' + tasmanian_openmp, when='@develop +cuda')
- depends_on('tasmanian@7.3+xsdkflags+mpi+blas' + tasmanian_openmp, when='@0.6.0')
- depends_on('tasmanian@7.3+xsdkflags+mpi+blas+cuda+magma' + tasmanian_openmp, when='@0.6.0 +cuda')
- depends_on('tasmanian@7.0+xsdkflags+mpi+blas' + tasmanian_openmp, when='@0.5.0')
- depends_on('tasmanian@7.0+xsdkflags+mpi+blas+cuda+magma' + tasmanian_openmp, when='@0.5.0 +cuda')
- depends_on('tasmanian@6.0+xsdkflags+blas~openmp', when='@0.4.0')
- depends_on('tasmanian@6.0+xsdkflags+blas+cuda+magma~openmp', when='@0.4.0 +cuda')
-
- depends_on('arborx@1.0', when='@develop +arborx')
+ xsdk_depends_on('tasmanian@develop+xsdkflags+blas' + tasmanian_openmp,
+ when='@develop',
+ cuda_var=['cuda', '?magma'], rocm_var=['rocm', '?magma'])
+ xsdk_depends_on('tasmanian@7.7+xsdkflags+mpi+blas' + tasmanian_openmp,
+ when='@0.7.0', cuda_var=['cuda', '?magma'])
+ xsdk_depends_on('tasmanian@7.3+xsdkflags+mpi+blas' + tasmanian_openmp,
+ when='@0.6.0', cuda_var=['cuda', '?magma'])
+ xsdk_depends_on('tasmanian@7.0+xsdkflags+mpi+blas' + tasmanian_openmp,
+ when='@0.5.0', cuda_var=['cuda', '?magma'])
+ xsdk_depends_on('tasmanian@6.0+xsdkflags+blas~openmp', when='@0.4.0',
+ cuda_var=['cuda', '?magma'])
+
+ xsdk_depends_on('arborx@master', when='@develop +arborx')
+ xsdk_depends_on('arborx@1.1', when='@0.7.0 +arborx')
# the Fortran 2003 bindings of phist require python@3:, but this
# creates a conflict with other packages like petsc@main. Actually
@@ -177,45 +304,54 @@ class Xsdk(BundlePackage):
# This will be fixed once the new concretizer becomes available
# (says @adamjstewart)
- depends_on('phist kernel_lib=tpetra', when='+trilinos +phist')
- depends_on('phist kernel_lib=petsc', when='~trilinos +phist')
- depends_on('phist@develop ~fortran ~scamac ~openmp ~host ~int64', when='@develop +phist')
- depends_on('phist@1.9.3 ~fortran ~scamac ~openmp ~host ~int64', when='@0.6.0 +phist')
- depends_on('phist@1.8.0 ~fortran ~scamac ~openmp ~host ~int64', when='@0.5.0 +phist')
- depends_on('phist@1.7.5 ~fortran ~scamac ~openmp ~host ~int64', when='@0.4.0 +phist')
-
- depends_on('ginkgo@develop ~openmp', when='@develop +ginkgo')
- depends_on('ginkgo@develop ~openmp+cuda', when='@develop +ginkgo +cuda')
- depends_on('ginkgo@1.3.0 ~openmp', when='@0.6.0 +ginkgo')
- depends_on('ginkgo@1.3.0 ~openmp+cuda', when='@0.6.0 +cuda +ginkgo')
- depends_on('ginkgo@1.1.0 ~openmp', when='@0.5.0 +ginkgo')
- depends_on('ginkgo@1.1.0 ~openmp+cuda', when='@0.5.0 +cuda +ginkgo')
-
- depends_on('py-libensemble@develop+petsc4py', type='run', when='@develop +libensemble')
- depends_on('py-petsc4py@main', type='run', when='@develop +libensemble')
- depends_on('py-libensemble@0.7.1+petsc4py', type='run', when='@0.6.0 +libensemble')
- depends_on('py-petsc4py@3.14.0', type='run', when='@0.6.0 +libensemble')
- depends_on('py-libensemble@0.5.2+petsc4py', type='run', when='@0.5.0 +libensemble')
- depends_on('py-petsc4py@3.12.0', type='run', when='@0.5.0 +libensemble')
-
- depends_on('precice ~petsc', when='platform=cray +precice')
- depends_on('precice@develop', when='@develop +precice')
- depends_on('precice@2.1.1', when='@0.6.0 +precice')
- depends_on('precice@1.6.1', when='@0.5.0 +precice')
-
- depends_on('butterflypack@master', when='@develop +butterflypack')
- depends_on('butterflypack@1.2.1', when='@0.6.0 +butterflypack')
- depends_on('butterflypack@1.1.0', when='@0.5.0 +butterflypack')
-
- depends_on('heffte +fftw+cuda+magma', when='+cuda +heffte')
- depends_on('openmpi +cuda', when='+cuda +heffte')
- depends_on('heffte@develop+fftw', when='@develop +heffte')
- depends_on('heffte@2.0.0+fftw', when='@0.6.0 +heffte')
-
- depends_on('slate@master ~cuda', when='@develop ~cuda +slate %gcc@6.0:')
- depends_on('slate@master +cuda', when='@develop +cuda +slate %gcc@6.0:')
- depends_on('slate@2020.10.00 ~cuda', when='@0.6.0 ~cuda +slate %gcc@6.0:')
- depends_on('slate@2020.10.00 +cuda', when='@0.6.0 +cuda +slate %gcc@6.0:')
-
- # How do we propagate debug flag to all depends on packages ?
- # If I just do spack install xsdk+debug will that propagate it down?
+ xsdk_depends_on('phist kernel_lib=tpetra', when='+trilinos +phist')
+ xsdk_depends_on('phist kernel_lib=petsc', when='~trilinos +phist')
+ xsdk_depends_on('phist@develop ~fortran ~scamac ~openmp ~host ~int64',
+ when='@develop +phist')
+ xsdk_depends_on('phist@1.9.5 ~fortran ~scamac ~openmp ~host ~int64',
+ when='@0.7.0 +phist')
+ xsdk_depends_on('phist@1.9.3 ~fortran ~scamac ~openmp ~host ~int64',
+ when='@0.6.0 +phist')
+ xsdk_depends_on('phist@1.8.0 ~fortran ~scamac ~openmp ~host ~int64',
+ when='@0.5.0 +phist')
+ xsdk_depends_on('phist@1.7.5 ~fortran ~scamac ~openmp ~host ~int64',
+ when='@0.4.0 +phist')
+
+ xsdk_depends_on('ginkgo@develop ~openmp', when='@develop +ginkgo',
+ cuda_var='cuda', rocm_var='rocm')
+ xsdk_depends_on('ginkgo@1.4.0 ~openmp',
+ when='@0.7.0 +ginkgo', cuda_var='cuda', rocm_var='rocm')
+ xsdk_depends_on('ginkgo@1.3.0 ~openmp',
+ when='@0.6.0 +ginkgo', cuda_var='cuda')
+ xsdk_depends_on('ginkgo@1.1.0 ~openmp', when='@0.5.0 +ginkgo')
+
+ xsdk_depends_on('py-libensemble@develop+petsc4py', when='@develop +libensemble')
+ xsdk_depends_on('py-petsc4py@main', when='@develop +libensemble')
+ xsdk_depends_on('py-libensemble@0.8.0+petsc4py', when='@0.7.0 +libensemble')
+ xsdk_depends_on('py-petsc4py@3.16.1', when='@0.7.0 +libensemble')
+ xsdk_depends_on('py-libensemble@0.7.1+petsc4py', when='@0.6.0 +libensemble')
+ xsdk_depends_on('py-petsc4py@3.14.0', when='@0.6.0 +libensemble')
+ xsdk_depends_on('py-libensemble@0.5.2+petsc4py', when='@0.5.0 +libensemble')
+ xsdk_depends_on('py-petsc4py@3.12.0', when='@0.5.0 +libensemble')
+
+ xsdk_depends_on('precice ~petsc', when='platform=cray +precice')
+ xsdk_depends_on('precice@develop', when='@develop +precice')
+ xsdk_depends_on('precice@2.3.0', when='@0.7.0 +precice')
+ xsdk_depends_on('precice@2.1.1', when='@0.6.0 +precice')
+ xsdk_depends_on('precice@1.6.1', when='@0.5.0 +precice')
+
+ xsdk_depends_on('butterflypack@master', when='@develop +butterflypack')
+ xsdk_depends_on('butterflypack@2.0.0', when='@0.7.0 +butterflypack')
+ xsdk_depends_on('butterflypack@1.2.1', when='@0.6.0 +butterflypack')
+ xsdk_depends_on('butterflypack@1.1.0', when='@0.5.0 +butterflypack')
+
+ xsdk_depends_on('heffte@develop+fftw', when='@develop +heffte',
+ cuda_var=['cuda', '?magma'], rocm_var=['rocm', '?magma'])
+ xsdk_depends_on('heffte@2.2.0+fftw', when='@0.7.0 +heffte',
+ cuda_var=['cuda', '?magma'], rocm_var=['rocm', '?magma'])
+ xsdk_depends_on('heffte@2.0.0+fftw', when='@0.6.0 +heffte',
+ cuda_var=['cuda', '?magma'])
+
+ xsdk_depends_on('slate@master', when='@develop +slate %gcc@6.0:', cuda_var='cuda')
+ xsdk_depends_on('slate@2021.05.02', when='@0.7.0 +slate %gcc@6.0:', cuda_var='cuda')
+ xsdk_depends_on('slate@2020.10.00', when='@0.6.0 +slate %gcc@6.0:', cuda_var='cuda')